aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.travis.yml33
-rw-r--r--Cargo.lock38
-rw-r--r--README.md2
-rw-r--r--ROADMAP.md77
-rw-r--r--appveyor.yml19
-rw-r--r--crates/gen_lsp_server/src/lib.rs2
-rw-r--r--crates/gen_lsp_server/src/msg.rs2
-rw-r--r--crates/ra_analysis/Cargo.toml3
-rw-r--r--crates/ra_analysis/src/completion/mod.rs54
-rw-r--r--crates/ra_analysis/src/completion/reference_completion.rs106
-rw-r--r--crates/ra_analysis/src/db.rs125
-rw-r--r--crates/ra_analysis/src/descriptors/function/imp.rs21
-rw-r--r--crates/ra_analysis/src/descriptors/mod.rs92
-rw-r--r--crates/ra_analysis/src/descriptors/module/mod.rs236
-rw-r--r--crates/ra_analysis/src/descriptors/module/scope.rs124
-rw-r--r--crates/ra_analysis/src/imp.rs376
-rw-r--r--crates/ra_analysis/src/lib.rs44
-rw-r--r--crates/ra_analysis/src/mock_analysis.rs40
-rw-r--r--crates/ra_analysis/src/symbol_index.rs28
-rw-r--r--crates/ra_analysis/src/syntax_ptr.rs84
-rw-r--r--crates/ra_analysis/tests/tests.rs14
-rw-r--r--crates/ra_db/Cargo.toml14
-rw-r--r--crates/ra_db/src/file_resolver.rs76
-rw-r--r--crates/ra_db/src/input.rs (renamed from crates/ra_analysis/src/input.rs)37
-rw-r--r--crates/ra_db/src/lib.rs83
-rw-r--r--crates/ra_db/src/loc2id.rs100
-rw-r--r--crates/ra_db/src/mock.rs51
-rw-r--r--crates/ra_db/src/syntax_ptr.rs48
-rw-r--r--crates/ra_editor/src/code_actions.rs41
-rw-r--r--crates/ra_editor/src/lib.rs7
-rw-r--r--crates/ra_editor/src/line_index.rs219
-rw-r--r--crates/ra_hir/Cargo.toml17
-rw-r--r--crates/ra_hir/src/arena.rs66
-rw-r--r--crates/ra_hir/src/db.rs66
-rw-r--r--crates/ra_hir/src/function/mod.rs (renamed from crates/ra_analysis/src/descriptors/function/mod.rs)88
-rw-r--r--crates/ra_hir/src/function/scope.rs (renamed from crates/ra_analysis/src/descriptors/function/scope.rs)101
-rw-r--r--crates/ra_hir/src/lib.rs141
-rw-r--r--crates/ra_hir/src/mock.rs172
-rw-r--r--crates/ra_hir/src/module/imp.rs (renamed from crates/ra_analysis/src/descriptors/module/imp.rs)81
-rw-r--r--crates/ra_hir/src/module/mod.rs373
-rw-r--r--crates/ra_hir/src/module/nameres.rs434
-rw-r--r--crates/ra_hir/src/path.rs148
-rw-r--r--crates/ra_hir/src/query_definitions.rs154
-rw-r--r--crates/ra_lsp_server/src/caps.rs2
-rw-r--r--crates/ra_lsp_server/src/conv.rs12
-rw-r--r--crates/ra_lsp_server/src/main.rs24
-rw-r--r--crates/ra_lsp_server/src/main_loop/handlers.rs35
-rw-r--r--crates/ra_lsp_server/src/main_loop/mod.rs32
-rw-r--r--crates/ra_lsp_server/src/path_map.rs6
-rw-r--r--crates/ra_lsp_server/src/server_world.rs2
-rw-r--r--crates/ra_syntax/src/ast/generated.rs74
-rw-r--r--crates/ra_syntax/src/ast/mod.rs18
-rw-r--r--crates/ra_syntax/src/grammar.ron2
-rw-r--r--crates/ra_syntax/src/grammar/items/mod.rs2
-rw-r--r--crates/ra_syntax/src/grammar/paths.rs2
-rw-r--r--crates/ra_syntax/src/reparsing.rs6
-rw-r--r--crates/ra_syntax/src/string_lexing.rs414
-rw-r--r--crates/ra_syntax/src/string_lexing/byte.rs51
-rw-r--r--crates/ra_syntax/src/string_lexing/byte_string.rs51
-rw-r--r--crates/ra_syntax/src/string_lexing/char.rs176
-rw-r--r--crates/ra_syntax/src/string_lexing/mod.rs13
-rw-r--r--crates/ra_syntax/src/string_lexing/parser.rs201
-rw-r--r--crates/ra_syntax/src/string_lexing/string.rs46
-rw-r--r--crates/ra_syntax/src/validation/byte.rs211
-rw-r--r--crates/ra_syntax/src/validation/byte_string.rs178
-rw-r--r--crates/ra_syntax/src/validation/char.rs192
-rw-r--r--crates/ra_syntax/src/validation/mod.rs4
-rw-r--r--crates/ra_syntax/src/yellow/syntax_error.rs23
-rw-r--r--crates/tools/src/lib.rs2
-rw-r--r--editors/code/package-lock.json62
-rw-r--r--editors/code/package.json1
71 files changed, 4031 insertions, 1848 deletions
diff --git a/.travis.yml b/.travis.yml
index ecb3b76f4..36fd746dd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,30 +7,33 @@ before_cache:
7env: 7env:
8 - CARGO_INCREMENTAL=0 8 - CARGO_INCREMENTAL=0
9 9
10build: &rust_build
11 language: rust
12 rust: beta
13 script:
14 - cargo gen-tests --verify
15 - cargo gen-syntax --verify
16 - cargo test
17
10matrix: 18matrix:
11 include: 19 include:
12 - language: rust 20 - os: linux
13 rust: beta 21 <<: *rust_build
14 script:
15 - cargo gen-syntax --verify
16 - cargo gen-tests --verify
17 - cargo test
18 # - language: rust
19 # rust: nightly
20 # before_script:
21 # - rustup component add clippy-preview
22 # - rustup component add rustfmt-preview
23 # script:
24 # - cargo fmt --all -- --check || true
25 # - cargo clippy
26 - language: node_js 22 - language: node_js
27 node_js: node 23 node_js: node
28 before_script: false 24 before_script: false
29 script: 25 script:
30 - cd editors/code && npm ci && npm run travis 26 - cd editors/code && npm ci && npm run travis
31 27
28 - os: windows
29 if: branch = master
30 before_script:
31 - dos2unix ./crates/ra_syntax/tests/data/parser/**/*.txt
32 - dos2unix ./crates/ra_syntax/tests/data/parser/**/*.rs
33 <<: *rust_build
34
32 allow_failures: 35 allow_failures:
33 - rust nightly 36 - os: windows
34 37
35branches: 38branches:
36 only: 39 only:
diff --git a/Cargo.lock b/Cargo.lock
index 18aac79ab..42a962cf6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -351,6 +351,11 @@ version = "1.1.0"
351source = "registry+https://github.com/rust-lang/crates.io-index" 351source = "registry+https://github.com/rust-lang/crates.io-index"
352 352
353[[package]] 353[[package]]
354name = "id-arena"
355version = "2.0.0"
356source = "registry+https://github.com/rust-lang/crates.io-index"
357
358[[package]]
354name = "idna" 359name = "idna"
355version = "0.1.5" 360version = "0.1.5"
356source = "registry+https://github.com/rust-lang/crates.io-index" 361source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -600,7 +605,10 @@ version = "0.1.0"
600dependencies = [ 605dependencies = [
601 "fst 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", 606 "fst 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
602 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", 607 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
608 "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
609 "ra_db 0.1.0",
603 "ra_editor 0.1.0", 610 "ra_editor 0.1.0",
611 "ra_hir 0.1.0",
604 "ra_syntax 0.1.0", 612 "ra_syntax 0.1.0",
605 "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", 613 "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
606 "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 614 "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -622,6 +630,19 @@ dependencies = [
622] 630]
623 631
624[[package]] 632[[package]]
633name = "ra_db"
634version = "0.1.0"
635dependencies = [
636 "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
637 "ra_editor 0.1.0",
638 "ra_syntax 0.1.0",
639 "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
640 "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
641 "salsa 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
642 "test_utils 0.1.0",
643]
644
645[[package]]
625name = "ra_editor" 646name = "ra_editor"
626version = "0.1.0" 647version = "0.1.0"
627dependencies = [ 648dependencies = [
@@ -634,6 +655,22 @@ dependencies = [
634] 655]
635 656
636[[package]] 657[[package]]
658name = "ra_hir"
659version = "0.1.0"
660dependencies = [
661 "id-arena 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
662 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
663 "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
664 "ra_db 0.1.0",
665 "ra_editor 0.1.0",
666 "ra_syntax 0.1.0",
667 "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
668 "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
669 "salsa 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
670 "test_utils 0.1.0",
671]
672
673[[package]]
637name = "ra_lsp_server" 674name = "ra_lsp_server"
638version = "0.1.0" 675version = "0.1.0"
639dependencies = [ 676dependencies = [
@@ -1300,6 +1337,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
1300"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" 1337"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
1301"checksum heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ea04fa3ead4e05e51a7c806fc07271fdbde4e246a6c6d1efd52e72230b771b82" 1338"checksum heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ea04fa3ead4e05e51a7c806fc07271fdbde4e246a6c6d1efd52e72230b771b82"
1302"checksum humansize 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6cab2627acfc432780848602f3f558f7e9dd427352224b0d9324025796d2a5e" 1339"checksum humansize 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6cab2627acfc432780848602f3f558f7e9dd427352224b0d9324025796d2a5e"
1340"checksum id-arena 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3a7250033feafee46a1cecd2c2616a64aec1d064f38c9ae2bdd297728542843e"
1303"checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" 1341"checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e"
1304"checksum im 12.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9c7f9bb8aee47fc16d535a705f7867a9fc83bb822e5e1043bb98e77ffeed3c" 1342"checksum im 12.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9c7f9bb8aee47fc16d535a705f7867a9fc83bb822e5e1043bb98e77ffeed3c"
1305"checksum indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7e81a7c05f79578dbc15793d8b619db9ba32b4577003ef3af1a91c416798c58d" 1343"checksum indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7e81a7c05f79578dbc15793d8b619db9ba32b4577003ef3af1a91c416798c58d"
diff --git a/README.md b/README.md
index 1c5398779..a6195237f 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,6 @@
1# Rust Analyzer 1# Rust Analyzer
2 2
3[![Build Status](https://travis-ci.org/rust-analyzer/rust-analyzer.svg?branch=master)](https://travis-ci.org/rust-analyzer/rust-analyzer) 3[![Build Status](https://travis-ci.org/rust-analyzer/rust-analyzer.svg?branch=master)](https://travis-ci.org/rust-analyzer/rust-analyzer)
4[![Build status](https://ci.appveyor.com/api/projects/status/vtt455oi3hjy9uvk/branch/master?svg=true)](https://ci.appveyor.com/project/matklad/rust-analyzer/branch/master)
5
6 4
7Rust Analyzer is an **experimental** modular compiler frontend for the 5Rust Analyzer is an **experimental** modular compiler frontend for the
8Rust language, which aims to lay a foundation for excellent IDE 6Rust language, which aims to lay a foundation for excellent IDE
diff --git a/ROADMAP.md b/ROADMAP.md
new file mode 100644
index 000000000..951a092b4
--- /dev/null
+++ b/ROADMAP.md
@@ -0,0 +1,77 @@
1# Rust Analyzer Roadmap 01
2
3Written on 2018-11-06, extends approximately to February 2019.
4After that, we should coordinate with the compiler/rls developers to align goals and share code and experience.
5
6
7# Overall Goals
8
9The mission is:
10 * Provide an excellent "code analyzed as you type" IDE experience for the Rust language,
11 * Implement the bulk of the features in Rust itself.
12
13
14High-level architecture constraints:
15 * Long-term, replace the current rustc frontend.
16 It's *obvious* that the code should be shared, but OTOH, all great IDEs started as from-scratch rewrites.
17 * Don't hard-code a particular protocol or mode of operation.
18 Produce a library which could be used for implementing an LSP server, or for in-process embedding.
19 * As long as possible, stick with stable Rust (NB: we currently use beta for 2018 edition and salsa).
20
21
22# Current Goals
23
24Ideally, we would be coordinating with the compiler/rls teams, but they are busy working on making Rust 2018 at the moment.
25The sync-up point will happen some time after the edition, probably early 2019.
26In the meantime, the goal is to **experiment**, specifically, to figure out how a from-scratch written RLS might look like.
27
28
29## Data Storage and Protocol implementation
30
31The fundamental part of any architecture is who owns which data, how the data is mutated and how the data is exposed to user.
32For storage we use the [salsa](http://github.com/salsa-rs/salsa) library, which provides a solid model that seems to be the way to go.
33
34Modification to source files is mostly driven by the language client, but we also should support watching the file system. The current
35file watching implementation is a stub.
36
37**Action Item:** implement reliable file watching service.
38
39We also should extract LSP bits as a reusable library. There's already `gen_lsp_server`, but it is pretty limited.
40
41**Action Item:** try using `gen_lsp_server` in more than one language server, for example for TOML and Nix.
42
43The ideal architecture for `gen_lsp_server` is still unclear. I'd rather avoid futures: they bring significant runtime complexity
44(call stacks become insane) and the performance benefits are negligible for our use case (one thread per request is perfectly OK given
45the low amount of requests a language server receives). The current interface is based on crossbeam-channel, but it's not clear
46if that is the best choice.
47
48
49## Low-effort, high payoff features
50
51Implementing 20% of type inference will give use 80% of completion.
52Thus it makes sense to partially implement name resolution, type inference and trait matching, even though there is a chance that
53this code is replaced later on when we integrate with the compiler
54
55Specifically, we need to:
56
57* **Action Item:** implement path resolution, so that we get completion in imports and such.
58* **Action Item:** implement simple type inference, so that we get completion for inherent methods.
59* **Action Item:** implement nicer completion infrastructure, so that we have icons, snippets, doc comments, after insert callbacks, ...
60
61
62## Dragons to kill
63
64To make experiments most effective, we should try to prototype solutions for the hardest problems.
65In the case of Rust, the two hardest problems are:
66 * Conditional compilation and source/model mismatch.
67 A single source file might correspond to several entities in the semantic model.
68 For example, different cfg flags produce effectively different crates from the same source.
69 * Macros are intertwined with name resolution in a single fix-point iteration algorithm.
70 This is just plain hard to implement, but also interacts poorly with on-demand.
71
72
73For the first bullet point, we need to design descriptors infra and explicit mapping step between sources and semantic model, which is intentionally fuzzy in one direction.
74The **action item** here is basically "write code, see what works, keep high-level picture in mind".
75
76For the second bullet point, there's hope that salsa with its deep memoization will result in a fast enough solution even without being fully on-demand.
77Again, the **action item** is to write the code and see what works. Salsa itself uses macros heavily, so it should be a great test.
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index a32a1e7b8..000000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,19 +0,0 @@
1os: Visual Studio 2015
2
3install:
4 - curl https://win.rustup.rs/ --output rustup-init.exe
5 - rustup-init -yv --default-toolchain beta --default-host x86_64-pc-windows-msvc
6 - set PATH=%PATH%;%USERPROFILE%\.cargo\bin
7 - rustc -vV
8 - cargo -vV
9
10build: false
11
12test_script:
13 - cargo test
14
15branches:
16 only:
17 - staging
18 - master
19 - trying
diff --git a/crates/gen_lsp_server/src/lib.rs b/crates/gen_lsp_server/src/lib.rs
index e45a6b5e2..5dab8f408 100644
--- a/crates/gen_lsp_server/src/lib.rs
+++ b/crates/gen_lsp_server/src/lib.rs
@@ -1,4 +1,4 @@
1//! A language server scaffold, exposing synchroneous crossbeam-channel based API. 1//! A language server scaffold, exposing a synchronous crossbeam-channel based API.
2//! This crate handles protocol handshaking and parsing messages, while you 2//! This crate handles protocol handshaking and parsing messages, while you
3//! control the message dispatch loop yourself. 3//! control the message dispatch loop yourself.
4//! 4//!
diff --git a/crates/gen_lsp_server/src/msg.rs b/crates/gen_lsp_server/src/msg.rs
index e0d0aeab5..e1b27c808 100644
--- a/crates/gen_lsp_server/src/msg.rs
+++ b/crates/gen_lsp_server/src/msg.rs
@@ -94,7 +94,7 @@ impl RawRequest {
94 R::Params: Serialize, 94 R::Params: Serialize,
95 { 95 {
96 RawRequest { 96 RawRequest {
97 id: id, 97 id,
98 method: R::METHOD.to_string(), 98 method: R::METHOD.to_string(),
99 params: to_value(params).unwrap(), 99 params: to_value(params).unwrap(),
100 } 100 }
diff --git a/crates/ra_analysis/Cargo.toml b/crates/ra_analysis/Cargo.toml
index 908ee1c81..fe9765a66 100644
--- a/crates/ra_analysis/Cargo.toml
+++ b/crates/ra_analysis/Cargo.toml
@@ -11,6 +11,9 @@ rayon = "1.0.2"
11fst = "0.3.1" 11fst = "0.3.1"
12salsa = "0.8.0" 12salsa = "0.8.0"
13rustc-hash = "1.0" 13rustc-hash = "1.0"
14parking_lot = "0.6.4"
14ra_syntax = { path = "../ra_syntax" } 15ra_syntax = { path = "../ra_syntax" }
15ra_editor = { path = "../ra_editor" } 16ra_editor = { path = "../ra_editor" }
17ra_db = { path = "../ra_db" }
18hir = { path = "../ra_hir", package = "ra_hir" }
16test_utils = { path = "../test_utils" } 19test_utils = { path = "../test_utils" }
diff --git a/crates/ra_analysis/src/completion/mod.rs b/crates/ra_analysis/src/completion/mod.rs
index 2e082705e..e5ba92acd 100644
--- a/crates/ra_analysis/src/completion/mod.rs
+++ b/crates/ra_analysis/src/completion/mod.rs
@@ -2,18 +2,16 @@ mod reference_completion;
2 2
3use ra_editor::find_node_at_offset; 3use ra_editor::find_node_at_offset;
4use ra_syntax::{ 4use ra_syntax::{
5 algo::find_leaf_at_offset,
6 algo::visit::{visitor_ctx, VisitorCtx}, 5 algo::visit::{visitor_ctx, VisitorCtx},
7 ast, 6 ast,
8 AstNode, AtomEdit, 7 AstNode, AtomEdit,
9 SyntaxNodeRef, 8 SyntaxNodeRef,
10}; 9};
10use ra_db::SyntaxDatabase;
11use rustc_hash::{FxHashMap}; 11use rustc_hash::{FxHashMap};
12 12
13use crate::{ 13use crate::{
14 db::{self, SyntaxDatabase}, 14 db,
15 descriptors::{DescriptorDatabase, module::ModuleSource},
16 input::{FilesDatabase},
17 Cancelable, FilePosition 15 Cancelable, FilePosition
18}; 16};
19 17
@@ -31,39 +29,21 @@ pub(crate) fn completions(
31 db: &db::RootDatabase, 29 db: &db::RootDatabase,
32 position: FilePosition, 30 position: FilePosition,
33) -> Cancelable<Option<Vec<CompletionItem>>> { 31) -> Cancelable<Option<Vec<CompletionItem>>> {
34 let original_file = db.file_syntax(position.file_id); 32 let original_file = db.source_file(position.file_id);
35 // Insert a fake ident to get a valid parse tree 33 // Insert a fake ident to get a valid parse tree
36 let file = { 34 let file = {
37 let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string()); 35 let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string());
38 original_file.reparse(&edit) 36 original_file.reparse(&edit)
39 }; 37 };
40 38
41 let leaf = match find_leaf_at_offset(original_file.syntax(), position.offset).left_biased() { 39 let module = ctry!(hir::Module::guess_from_position(db, position)?);
42 None => return Ok(None),
43 Some(it) => it,
44 };
45 let source_root_id = db.file_source_root(position.file_id);
46 let module_tree = db.module_tree(source_root_id)?;
47 let module_source = ModuleSource::for_node(position.file_id, leaf);
48 let module_id = match module_tree.any_module_for_source(module_source) {
49 None => return Ok(None),
50 Some(it) => it,
51 };
52 40
53 let mut res = Vec::new(); 41 let mut res = Vec::new();
54 let mut has_completions = false; 42 let mut has_completions = false;
55 // First, let's try to complete a reference to some declaration. 43 // First, let's try to complete a reference to some declaration.
56 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) { 44 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) {
57 has_completions = true; 45 has_completions = true;
58 reference_completion::completions( 46 reference_completion::completions(&mut res, db, &module, &file, name_ref)?;
59 &mut res,
60 db,
61 source_root_id,
62 &module_tree,
63 module_id,
64 &file,
65 name_ref,
66 )?;
67 // special case, `trait T { fn foo(i_am_a_name_ref) {} }` 47 // special case, `trait T { fn foo(i_am_a_name_ref) {} }`
68 if is_node::<ast::Param>(name_ref.syntax()) { 48 if is_node::<ast::Param>(name_ref.syntax()) {
69 param_completions(name_ref.syntax(), &mut res); 49 param_completions(name_ref.syntax(), &mut res);
@@ -219,9 +199,9 @@ mod tests {
219 <|> 199 <|>
220 } 200 }
221 ", 201 ",
222 r#"[CompletionItem { label: "Foo", lookup: None, snippet: None }, 202 r#"[CompletionItem { label: "quux", lookup: None, snippet: None },
223 CompletionItem { label: "Baz", lookup: None, snippet: None }, 203 CompletionItem { label: "Foo", lookup: None, snippet: None },
224 CompletionItem { label: "quux", lookup: None, snippet: None }]"#, 204 CompletionItem { label: "Baz", lookup: None, snippet: None }]"#,
225 ); 205 );
226 } 206 }
227 207
@@ -236,6 +216,20 @@ mod tests {
236 } 216 }
237 217
238 #[test] 218 #[test]
219 fn test_completion_self_path() {
220 check_scope_completion(
221 r"
222 use self::m::<|>;
223
224 mod m {
225 struct Bar;
226 }
227 ",
228 r#"[CompletionItem { label: "Bar", lookup: None, snippet: None }]"#,
229 );
230 }
231
232 #[test]
239 fn test_completion_mod_scope_nested() { 233 fn test_completion_mod_scope_nested() {
240 check_scope_completion( 234 check_scope_completion(
241 r" 235 r"
@@ -245,8 +239,8 @@ mod tests {
245 fn quux() { <|> } 239 fn quux() { <|> }
246 } 240 }
247 ", 241 ",
248 r#"[CompletionItem { label: "Bar", lookup: None, snippet: None }, 242 r#"[CompletionItem { label: "quux", lookup: None, snippet: None },
249 CompletionItem { label: "quux", lookup: None, snippet: None }]"#, 243 CompletionItem { label: "Bar", lookup: None, snippet: None }]"#,
250 ); 244 );
251 } 245 }
252 246
diff --git a/crates/ra_analysis/src/completion/reference_completion.rs b/crates/ra_analysis/src/completion/reference_completion.rs
index 6c5fd0be6..e1a2d5241 100644
--- a/crates/ra_analysis/src/completion/reference_completion.rs
+++ b/crates/ra_analysis/src/completion/reference_completion.rs
@@ -6,23 +6,23 @@ use ra_syntax::{
6 ast::{self, LoopBodyOwner}, 6 ast::{self, LoopBodyOwner},
7 SyntaxKind::*, 7 SyntaxKind::*,
8}; 8};
9use hir::{
10 self,
11 FnScopes,
12 Def,
13 Path,
14};
9 15
10use crate::{ 16use crate::{
11 db::RootDatabase, 17 db::RootDatabase,
12 input::{SourceRootId},
13 completion::CompletionItem, 18 completion::CompletionItem,
14 descriptors::module::{ModuleId, ModuleTree},
15 descriptors::function::FnScopes,
16 descriptors::DescriptorDatabase,
17 Cancelable 19 Cancelable
18}; 20};
19 21
20pub(super) fn completions( 22pub(super) fn completions(
21 acc: &mut Vec<CompletionItem>, 23 acc: &mut Vec<CompletionItem>,
22 db: &RootDatabase, 24 db: &RootDatabase,
23 source_root_id: SourceRootId, 25 module: &hir::Module,
24 module_tree: &ModuleTree,
25 module_id: ModuleId,
26 file: &SourceFileNode, 26 file: &SourceFileNode,
27 name_ref: ast::NameRef, 27 name_ref: ast::NameRef,
28) -> Cancelable<()> { 28) -> Cancelable<()> {
@@ -40,25 +40,28 @@ pub(super) fn completions(
40 complete_expr_snippets(acc); 40 complete_expr_snippets(acc);
41 } 41 }
42 42
43 let module_scope = db.module_scope(source_root_id, module_id)?; 43 let module_scope = module.scope(db)?;
44 acc.extend( 44 acc.extend(
45 module_scope 45 module_scope
46 .entries() 46 .entries()
47 .iter() 47 .filter(|(_name, res)| {
48 .filter(|entry| {
49 // Don't expose this item 48 // Don't expose this item
50 !entry.ptr().range().is_subrange(&name_ref.syntax().range()) 49 match res.import {
50 None => true,
51 Some(import) => {
52 let range = import.range(db, module.source().file_id());
53 !range.is_subrange(&name_ref.syntax().range())
54 }
55 }
51 }) 56 })
52 .map(|entry| CompletionItem { 57 .map(|(name, _res)| CompletionItem {
53 label: entry.name().to_string(), 58 label: name.to_string(),
54 lookup: None, 59 lookup: None,
55 snippet: None, 60 snippet: None,
56 }), 61 }),
57 ); 62 );
58 } 63 }
59 NameRefKind::CratePath(path) => { 64 NameRefKind::Path(path) => complete_path(acc, db, module, path)?,
60 complete_path(acc, db, source_root_id, module_tree, module_id, path)?
61 }
62 NameRefKind::BareIdentInMod => { 65 NameRefKind::BareIdentInMod => {
63 let name_range = name_ref.syntax().range(); 66 let name_range = name_ref.syntax().range();
64 let top_node = name_ref 67 let top_node = name_ref
@@ -82,8 +85,8 @@ enum NameRefKind<'a> {
82 LocalRef { 85 LocalRef {
83 enclosing_fn: Option<ast::FnDef<'a>>, 86 enclosing_fn: Option<ast::FnDef<'a>>,
84 }, 87 },
85 /// NameRef is the last segment in crate:: path 88 /// NameRef is the last segment in some path
86 CratePath(Vec<ast::NameRef<'a>>), 89 Path(Path),
87 /// NameRef is bare identifier at the module's root. 90 /// NameRef is bare identifier at the module's root.
88 /// Used for keyword completion 91 /// Used for keyword completion
89 BareIdentInMod, 92 BareIdentInMod,
@@ -105,8 +108,10 @@ fn classify_name_ref(name_ref: ast::NameRef) -> Option<NameRefKind> {
105 let parent = name_ref.syntax().parent()?; 108 let parent = name_ref.syntax().parent()?;
106 if let Some(segment) = ast::PathSegment::cast(parent) { 109 if let Some(segment) = ast::PathSegment::cast(parent) {
107 let path = segment.parent_path(); 110 let path = segment.parent_path();
108 if let Some(crate_path) = crate_path(path) { 111 if let Some(path) = Path::from_ast(path) {
109 return Some(NameRefKind::CratePath(crate_path)); 112 if !path.is_ident() {
113 return Some(NameRefKind::Path(path));
114 }
110 } 115 }
111 if path.qualifier().is_none() { 116 if path.qualifier().is_none() {
112 let enclosing_fn = name_ref 117 let enclosing_fn = name_ref
@@ -120,32 +125,6 @@ fn classify_name_ref(name_ref: ast::NameRef) -> Option<NameRefKind> {
120 None 125 None
121} 126}
122 127
123fn crate_path(mut path: ast::Path) -> Option<Vec<ast::NameRef>> {
124 let mut res = Vec::new();
125 loop {
126 let segment = path.segment()?;
127 match segment.kind()? {
128 ast::PathSegmentKind::Name(name) => res.push(name),
129 ast::PathSegmentKind::CrateKw => break,
130 ast::PathSegmentKind::SelfKw | ast::PathSegmentKind::SuperKw => return None,
131 }
132 path = qualifier(path)?;
133 }
134 res.reverse();
135 return Some(res);
136
137 fn qualifier(path: ast::Path) -> Option<ast::Path> {
138 if let Some(q) = path.qualifier() {
139 return Some(q);
140 }
141 // TODO: this bottom up traversal is not too precise.
142 // Should we handle do a top-down analysiss, recording results?
143 let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
144 let use_tree = use_tree_list.parent_use_tree();
145 use_tree.path()
146 }
147}
148
149fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) { 128fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
150 let mut shadowed = FxHashSet::default(); 129 let mut shadowed = FxHashSet::default();
151 acc.extend( 130 acc.extend(
@@ -171,18 +150,24 @@ fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<Completi
171fn complete_path( 150fn complete_path(
172 acc: &mut Vec<CompletionItem>, 151 acc: &mut Vec<CompletionItem>,
173 db: &RootDatabase, 152 db: &RootDatabase,
174 source_root_id: SourceRootId, 153 module: &hir::Module,
175 module_tree: &ModuleTree, 154 mut path: Path,
176 module_id: ModuleId,
177 crate_path: Vec<ast::NameRef>,
178) -> Cancelable<()> { 155) -> Cancelable<()> {
179 let target_module_id = match find_target_module(module_tree, module_id, crate_path) { 156 if path.segments.is_empty() {
157 return Ok(());
158 }
159 path.segments.pop();
160 let def_id = match module.resolve_path(db, path)? {
180 None => return Ok(()), 161 None => return Ok(()),
181 Some(it) => it, 162 Some(it) => it,
182 }; 163 };
183 let module_scope = db.module_scope(source_root_id, target_module_id)?; 164 let target_module = match def_id.resolve(db)? {
184 let completions = module_scope.entries().iter().map(|entry| CompletionItem { 165 Def::Module(it) => it,
185 label: entry.name().to_string(), 166 Def::Item => return Ok(()),
167 };
168 let module_scope = target_module.scope(db)?;
169 let completions = module_scope.entries().map(|(name, _res)| CompletionItem {
170 label: name.to_string(),
186 lookup: None, 171 lookup: None,
187 snippet: None, 172 snippet: None,
188 }); 173 });
@@ -190,19 +175,6 @@ fn complete_path(
190 Ok(()) 175 Ok(())
191} 176}
192 177
193fn find_target_module(
194 module_tree: &ModuleTree,
195 module_id: ModuleId,
196 mut crate_path: Vec<ast::NameRef>,
197) -> Option<ModuleId> {
198 crate_path.pop();
199 let mut target_module = module_id.root(&module_tree);
200 for name in crate_path {
201 target_module = target_module.child(module_tree, name.text().as_str())?;
202 }
203 Some(target_module)
204}
205
206fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) { 178fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
207 acc.push(CompletionItem { 179 acc.push(CompletionItem {
208 label: "tfn".to_string(), 180 label: "tfn".to_string(),
diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs
index 194f1a6b0..df2ef293d 100644
--- a/crates/ra_analysis/src/db.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -1,23 +1,22 @@
1use std::sync::Arc; 1use std::sync::Arc;
2
3use ra_editor::LineIndex;
4use ra_syntax::{SourceFileNode, SyntaxNode};
5use salsa::{self, Database}; 2use salsa::{self, Database};
3use ra_db::{LocationIntener, BaseDatabase};
4use hir::{self, DefId, DefLoc, FnId, SourceItemId};
6 5
7use crate::{ 6use crate::{
8 db, 7 symbol_index,
9 descriptors::{
10 DescriptorDatabase, FnScopesQuery, FnSyntaxQuery, ModuleScopeQuery, ModuleTreeQuery,
11 SubmodulesQuery,
12 },
13 symbol_index::SymbolIndex,
14 syntax_ptr::SyntaxPtr,
15 Cancelable, Canceled, FileId,
16}; 8};
17 9
18#[derive(Debug)] 10#[derive(Debug)]
19pub(crate) struct RootDatabase { 11pub(crate) struct RootDatabase {
20 runtime: salsa::Runtime<RootDatabase>, 12 runtime: salsa::Runtime<RootDatabase>,
13 id_maps: Arc<IdMaps>,
14}
15
16#[derive(Debug, Default)]
17struct IdMaps {
18 fns: LocationIntener<SourceItemId, FnId>,
19 defs: LocationIntener<DefLoc, DefId>,
21} 20}
22 21
23impl salsa::Database for RootDatabase { 22impl salsa::Database for RootDatabase {
@@ -29,90 +28,68 @@ impl salsa::Database for RootDatabase {
29impl Default for RootDatabase { 28impl Default for RootDatabase {
30 fn default() -> RootDatabase { 29 fn default() -> RootDatabase {
31 let mut db = RootDatabase { 30 let mut db = RootDatabase {
32 runtime: Default::default(), 31 runtime: salsa::Runtime::default(),
32 id_maps: Default::default(),
33 }; 33 };
34 db.query_mut(crate::input::SourceRootQuery) 34 db.query_mut(ra_db::SourceRootQuery)
35 .set(crate::input::WORKSPACE, Default::default()); 35 .set(ra_db::WORKSPACE, Default::default());
36 db.query_mut(crate::input::CrateGraphQuery) 36 db.query_mut(ra_db::CrateGraphQuery)
37 .set((), Default::default()); 37 .set((), Default::default());
38 db.query_mut(crate::input::LibrariesQuery) 38 db.query_mut(ra_db::LibrariesQuery)
39 .set((), Default::default()); 39 .set((), Default::default());
40 db 40 db
41 } 41 }
42} 42}
43 43
44pub(crate) fn check_canceled(db: &impl salsa::Database) -> Cancelable<()> {
45 if db.salsa_runtime().is_current_revision_canceled() {
46 Err(Canceled)
47 } else {
48 Ok(())
49 }
50}
51
52impl salsa::ParallelDatabase for RootDatabase { 44impl salsa::ParallelDatabase for RootDatabase {
53 fn snapshot(&self) -> salsa::Snapshot<RootDatabase> { 45 fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
54 salsa::Snapshot::new(RootDatabase { 46 salsa::Snapshot::new(RootDatabase {
55 runtime: self.runtime.snapshot(self), 47 runtime: self.runtime.snapshot(self),
48 id_maps: self.id_maps.clone(),
56 }) 49 })
57 } 50 }
58} 51}
59 52
60salsa::database_storage! { 53impl BaseDatabase for RootDatabase {}
61 pub(crate) struct RootDatabaseStorage for RootDatabase { 54
62 impl crate::input::FilesDatabase { 55impl AsRef<LocationIntener<DefLoc, DefId>> for RootDatabase {
63 fn file_text() for crate::input::FileTextQuery; 56 fn as_ref(&self) -> &LocationIntener<DefLoc, DefId> {
64 fn file_source_root() for crate::input::FileSourceRootQuery; 57 &self.id_maps.defs
65 fn source_root() for crate::input::SourceRootQuery;
66 fn libraries() for crate::input::LibrariesQuery;
67 fn library_symbols() for crate::input::LibrarySymbolsQuery;
68 fn crate_graph() for crate::input::CrateGraphQuery;
69 }
70 impl SyntaxDatabase {
71 fn file_syntax() for FileSyntaxQuery;
72 fn file_lines() for FileLinesQuery;
73 fn file_symbols() for FileSymbolsQuery;
74 fn resolve_syntax_ptr() for ResolveSyntaxPtrQuery;
75 }
76 impl DescriptorDatabase {
77 fn module_tree() for ModuleTreeQuery;
78 fn module_descriptor() for SubmodulesQuery;
79 fn module_scope() for ModuleScopeQuery;
80 fn fn_syntax() for FnSyntaxQuery;
81 fn fn_scopes() for FnScopesQuery;
82 }
83 } 58 }
84} 59}
85 60
86salsa::query_group! { 61impl AsRef<LocationIntener<hir::SourceItemId, FnId>> for RootDatabase {
87 pub(crate) trait SyntaxDatabase: crate::input::FilesDatabase { 62 fn as_ref(&self) -> &LocationIntener<hir::SourceItemId, FnId> {
88 fn file_syntax(file_id: FileId) -> SourceFileNode { 63 &self.id_maps.fns
89 type FileSyntaxQuery; 64 }
65}
66
67salsa::database_storage! {
68 pub(crate) struct RootDatabaseStorage for RootDatabase {
69 impl ra_db::FilesDatabase {
70 fn file_text() for ra_db::FileTextQuery;
71 fn file_source_root() for ra_db::FileSourceRootQuery;
72 fn source_root() for ra_db::SourceRootQuery;
73 fn libraries() for ra_db::LibrariesQuery;
74 fn crate_graph() for ra_db::CrateGraphQuery;
90 } 75 }
91 fn file_lines(file_id: FileId) -> Arc<LineIndex> { 76 impl ra_db::SyntaxDatabase {
92 type FileLinesQuery; 77 fn source_file() for ra_db::SourceFileQuery;
78 fn file_lines() for ra_db::FileLinesQuery;
93 } 79 }
94 fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { 80 impl symbol_index::SymbolsDatabase {
95 type FileSymbolsQuery; 81 fn file_symbols() for symbol_index::FileSymbolsQuery;
82 fn library_symbols() for symbol_index::LibrarySymbolsQuery;
96 } 83 }
97 fn resolve_syntax_ptr(ptr: SyntaxPtr) -> SyntaxNode { 84 impl hir::db::HirDatabase {
98 type ResolveSyntaxPtrQuery; 85 fn module_tree() for hir::db::ModuleTreeQuery;
99 // Don't retain syntax trees in memory 86 fn fn_scopes() for hir::db::FnScopesQuery;
100 storage volatile; 87 fn file_items() for hir::db::SourceFileItemsQuery;
101 use fn crate::syntax_ptr::resolve_syntax_ptr; 88 fn file_item() for hir::db::FileItemQuery;
89 fn input_module_items() for hir::db::InputModuleItemsQuery;
90 fn item_map() for hir::db::ItemMapQuery;
91 fn fn_syntax() for hir::db::FnSyntaxQuery;
92 fn submodules() for hir::db::SubmodulesQuery;
102 } 93 }
103 } 94 }
104} 95}
105
106fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> SourceFileNode {
107 let text = db.file_text(file_id);
108 SourceFileNode::parse(&*text)
109}
110fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
111 let text = db.file_text(file_id);
112 Arc::new(LineIndex::new(&*text))
113}
114fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
115 db::check_canceled(db)?;
116 let syntax = db.file_syntax(file_id);
117 Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
118}
diff --git a/crates/ra_analysis/src/descriptors/function/imp.rs b/crates/ra_analysis/src/descriptors/function/imp.rs
deleted file mode 100644
index a989a04cd..000000000
--- a/crates/ra_analysis/src/descriptors/function/imp.rs
+++ /dev/null
@@ -1,21 +0,0 @@
1use std::sync::Arc;
2
3use ra_syntax::ast::{AstNode, FnDef, FnDefNode};
4
5use crate::descriptors::{
6 function::{FnId, FnScopes},
7 DescriptorDatabase,
8};
9
10/// Resolve `FnId` to the corresponding `SyntaxNode`
11/// TODO: this should return something more type-safe then `SyntaxNode`
12pub(crate) fn fn_syntax(db: &impl DescriptorDatabase, fn_id: FnId) -> FnDefNode {
13 let syntax = db.resolve_syntax_ptr(fn_id.0);
14 FnDef::cast(syntax.borrowed()).unwrap().owned()
15}
16
17pub(crate) fn fn_scopes(db: &impl DescriptorDatabase, fn_id: FnId) -> Arc<FnScopes> {
18 let syntax = db.fn_syntax(fn_id);
19 let res = FnScopes::new(syntax.borrowed());
20 Arc::new(res)
21}
diff --git a/crates/ra_analysis/src/descriptors/mod.rs b/crates/ra_analysis/src/descriptors/mod.rs
deleted file mode 100644
index 56bde3849..000000000
--- a/crates/ra_analysis/src/descriptors/mod.rs
+++ /dev/null
@@ -1,92 +0,0 @@
1pub(crate) mod function;
2pub(crate) mod module;
3
4use std::sync::Arc;
5
6use ra_syntax::{
7 ast::{self, AstNode, FnDefNode},
8 TextRange,
9};
10
11use crate::{
12 db::SyntaxDatabase,
13 descriptors::function::{resolve_local_name, FnId, FnScopes},
14 descriptors::module::{ModuleId, ModuleScope, ModuleTree, ModuleSource},
15 input::SourceRootId,
16 syntax_ptr::LocalSyntaxPtr,
17 Cancelable,
18};
19
20salsa::query_group! {
21 pub(crate) trait DescriptorDatabase: SyntaxDatabase {
22 fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
23 type ModuleTreeQuery;
24 use fn module::imp::module_tree;
25 }
26 fn submodules(source: ModuleSource) -> Cancelable<Arc<Vec<module::imp::Submodule>>> {
27 type SubmodulesQuery;
28 use fn module::imp::submodules;
29 }
30 fn module_scope(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<ModuleScope>> {
31 type ModuleScopeQuery;
32 use fn module::imp::module_scope;
33 }
34 fn fn_syntax(fn_id: FnId) -> FnDefNode {
35 type FnSyntaxQuery;
36 // Don't retain syntax trees in memory
37 storage volatile;
38 use fn function::imp::fn_syntax;
39 }
40 fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> {
41 type FnScopesQuery;
42 use fn function::imp::fn_scopes;
43 }
44 }
45}
46
47#[derive(Debug)]
48pub struct ReferenceDescriptor {
49 pub range: TextRange,
50 pub name: String,
51}
52
53#[derive(Debug)]
54pub struct DeclarationDescriptor<'a> {
55 pat: ast::BindPat<'a>,
56 pub range: TextRange,
57}
58
59impl<'a> DeclarationDescriptor<'a> {
60 pub fn new(pat: ast::BindPat) -> DeclarationDescriptor {
61 let range = pat.syntax().range();
62
63 DeclarationDescriptor { pat, range }
64 }
65
66 pub fn find_all_refs(&self) -> Vec<ReferenceDescriptor> {
67 let name_ptr = LocalSyntaxPtr::new(self.pat.syntax());
68
69 let fn_def = match self.pat.syntax().ancestors().find_map(ast::FnDef::cast) {
70 Some(def) => def,
71 None => return Default::default(),
72 };
73
74 let fn_scopes = FnScopes::new(fn_def);
75
76 let refs: Vec<_> = fn_def
77 .syntax()
78 .descendants()
79 .filter_map(ast::NameRef::cast)
80 .filter(|name_ref| match resolve_local_name(*name_ref, &fn_scopes) {
81 None => false,
82 Some(entry) => entry.ptr() == name_ptr,
83 })
84 .map(|name_ref| ReferenceDescriptor {
85 name: name_ref.syntax().text().to_string(),
86 range: name_ref.syntax().range(),
87 })
88 .collect();
89
90 refs
91 }
92}
diff --git a/crates/ra_analysis/src/descriptors/module/mod.rs b/crates/ra_analysis/src/descriptors/module/mod.rs
deleted file mode 100644
index bc1148b22..000000000
--- a/crates/ra_analysis/src/descriptors/module/mod.rs
+++ /dev/null
@@ -1,236 +0,0 @@
1pub(super) mod imp;
2pub(crate) mod scope;
3
4use ra_syntax::{
5 ast::{self, AstNode, NameOwner},
6 SmolStr, SyntaxNode, SyntaxNodeRef,
7};
8use relative_path::RelativePathBuf;
9
10use crate::{db::SyntaxDatabase, syntax_ptr::SyntaxPtr, FileId};
11
12pub(crate) use self::scope::ModuleScope;
13
14/// Phisically, rust source is organized as a set of files, but logically it is
15/// organized as a tree of modules. Usually, a single file corresponds to a
16/// single module, but it is not nessary the case.
17///
18/// Module encapsulate the logic of transitioning from the fuzzy world of files
19/// (which can have multiple parents) to the precise world of modules (which
20/// always have one parent).
21#[derive(Debug, PartialEq, Eq, Hash)]
22pub(crate) struct ModuleTree {
23 mods: Vec<ModuleData>,
24 links: Vec<LinkData>,
25}
26
27impl ModuleTree {
28 pub(crate) fn modules_for_source(&self, source: ModuleSource) -> Vec<ModuleId> {
29 self.mods
30 .iter()
31 .enumerate()
32 .filter(|(_idx, it)| it.source == source)
33 .map(|(idx, _)| ModuleId(idx as u32))
34 .collect()
35 }
36
37 pub(crate) fn any_module_for_source(&self, source: ModuleSource) -> Option<ModuleId> {
38 self.modules_for_source(source).pop()
39 }
40}
41
42/// `ModuleSource` is the syntax tree element that produced this module:
43/// either a file, or an inlinde module.
44#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
45pub(crate) enum ModuleSource {
46 SourceFile(FileId),
47 #[allow(dead_code)]
48 Module(SyntaxPtr),
49}
50
51/// An owned syntax node for a module. Unlike `ModuleSource`,
52/// this holds onto the AST for the whole file.
53enum ModuleSourceNode {
54 SourceFile(ast::SourceFileNode),
55 Module(ast::ModuleNode),
56}
57
58#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
59pub(crate) struct ModuleId(u32);
60
61#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
62pub(crate) struct LinkId(u32);
63
64#[derive(Clone, Debug, Hash, PartialEq, Eq)]
65pub enum Problem {
66 UnresolvedModule {
67 candidate: RelativePathBuf,
68 },
69 NotDirOwner {
70 move_to: RelativePathBuf,
71 candidate: RelativePathBuf,
72 },
73}
74
75impl ModuleId {
76 pub(crate) fn source(self, tree: &ModuleTree) -> ModuleSource {
77 tree.module(self).source
78 }
79 pub(crate) fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> {
80 tree.module(self).parent
81 }
82 pub(crate) fn parent(self, tree: &ModuleTree) -> Option<ModuleId> {
83 let link = self.parent_link(tree)?;
84 Some(tree.link(link).owner)
85 }
86 pub(crate) fn root(self, tree: &ModuleTree) -> ModuleId {
87 let mut curr = self;
88 let mut i = 0;
89 while let Some(next) = curr.parent(tree) {
90 curr = next;
91 i += 1;
92 // simplistic cycle detection
93 if i > 100 {
94 return self;
95 }
96 }
97 curr
98 }
99 pub(crate) fn child(self, tree: &ModuleTree, name: &str) -> Option<ModuleId> {
100 let link = tree
101 .module(self)
102 .children
103 .iter()
104 .map(|&it| tree.link(it))
105 .find(|it| it.name == name)?;
106 Some(*link.points_to.first()?)
107 }
108 pub(crate) fn problems(
109 self,
110 tree: &ModuleTree,
111 db: &impl SyntaxDatabase,
112 ) -> Vec<(SyntaxNode, Problem)> {
113 tree.module(self)
114 .children
115 .iter()
116 .filter_map(|&it| {
117 let p = tree.link(it).problem.clone()?;
118 let s = it.bind_source(tree, db);
119 let s = s.borrowed().name().unwrap().syntax().owned();
120 Some((s, p))
121 })
122 .collect()
123 }
124}
125
126impl LinkId {
127 pub(crate) fn owner(self, tree: &ModuleTree) -> ModuleId {
128 tree.link(self).owner
129 }
130 pub(crate) fn bind_source<'a>(
131 self,
132 tree: &ModuleTree,
133 db: &impl SyntaxDatabase,
134 ) -> ast::ModuleNode {
135 let owner = self.owner(tree);
136 match owner.source(tree).resolve(db) {
137 ModuleSourceNode::SourceFile(root) => {
138 let ast = imp::modules(root.borrowed())
139 .find(|(name, _)| name == &tree.link(self).name)
140 .unwrap()
141 .1;
142 ast.owned()
143 }
144 ModuleSourceNode::Module(it) => it,
145 }
146 }
147}
148
149#[derive(Debug, PartialEq, Eq, Hash)]
150struct ModuleData {
151 source: ModuleSource,
152 parent: Option<LinkId>,
153 children: Vec<LinkId>,
154}
155
156impl ModuleSource {
157 pub(crate) fn for_node(file_id: FileId, node: SyntaxNodeRef) -> ModuleSource {
158 for node in node.ancestors() {
159 if let Some(m) = ast::Module::cast(node) {
160 if !m.has_semi() {
161 return ModuleSource::new_inline(file_id, m);
162 }
163 }
164 }
165 ModuleSource::SourceFile(file_id)
166 }
167 pub(crate) fn new_inline(file_id: FileId, module: ast::Module) -> ModuleSource {
168 assert!(!module.has_semi());
169 let ptr = SyntaxPtr::new(file_id, module.syntax());
170 ModuleSource::Module(ptr)
171 }
172
173 pub(crate) fn as_file(self) -> Option<FileId> {
174 match self {
175 ModuleSource::SourceFile(f) => Some(f),
176 ModuleSource::Module(..) => None,
177 }
178 }
179
180 pub(crate) fn file_id(self) -> FileId {
181 match self {
182 ModuleSource::SourceFile(f) => f,
183 ModuleSource::Module(ptr) => ptr.file_id(),
184 }
185 }
186
187 fn resolve(self, db: &impl SyntaxDatabase) -> ModuleSourceNode {
188 match self {
189 ModuleSource::SourceFile(file_id) => {
190 let syntax = db.file_syntax(file_id);
191 ModuleSourceNode::SourceFile(syntax.ast().owned())
192 }
193 ModuleSource::Module(ptr) => {
194 let syntax = db.resolve_syntax_ptr(ptr);
195 let syntax = syntax.borrowed();
196 let module = ast::Module::cast(syntax).unwrap();
197 ModuleSourceNode::Module(module.owned())
198 }
199 }
200 }
201}
202
203#[derive(Hash, Debug, PartialEq, Eq)]
204struct LinkData {
205 owner: ModuleId,
206 name: SmolStr,
207 points_to: Vec<ModuleId>,
208 problem: Option<Problem>,
209}
210
211impl ModuleTree {
212 fn module(&self, id: ModuleId) -> &ModuleData {
213 &self.mods[id.0 as usize]
214 }
215 fn module_mut(&mut self, id: ModuleId) -> &mut ModuleData {
216 &mut self.mods[id.0 as usize]
217 }
218 fn link(&self, id: LinkId) -> &LinkData {
219 &self.links[id.0 as usize]
220 }
221 fn link_mut(&mut self, id: LinkId) -> &mut LinkData {
222 &mut self.links[id.0 as usize]
223 }
224
225 fn push_mod(&mut self, data: ModuleData) -> ModuleId {
226 let id = ModuleId(self.mods.len() as u32);
227 self.mods.push(data);
228 id
229 }
230 fn push_link(&mut self, data: LinkData) -> LinkId {
231 let id = LinkId(self.links.len() as u32);
232 self.mods[data.owner.0 as usize].children.push(id);
233 self.links.push(data);
234 id
235 }
236}
diff --git a/crates/ra_analysis/src/descriptors/module/scope.rs b/crates/ra_analysis/src/descriptors/module/scope.rs
deleted file mode 100644
index 4490228e4..000000000
--- a/crates/ra_analysis/src/descriptors/module/scope.rs
+++ /dev/null
@@ -1,124 +0,0 @@
1//! Backend for module-level scope resolution & completion
2
3use ra_syntax::{ast, AstNode, SmolStr};
4
5use crate::syntax_ptr::LocalSyntaxPtr;
6
7/// `ModuleScope` contains all named items declared in the scope.
8#[derive(Debug, PartialEq, Eq)]
9pub(crate) struct ModuleScope {
10 entries: Vec<Entry>,
11}
12
13/// `Entry` is a single named declaration iside a module.
14#[derive(Debug, PartialEq, Eq)]
15pub(crate) struct Entry {
16 ptr: LocalSyntaxPtr,
17 kind: EntryKind,
18 name: SmolStr,
19}
20
21#[derive(Debug, PartialEq, Eq)]
22enum EntryKind {
23 Item,
24 Import,
25}
26
27impl ModuleScope {
28 pub(super) fn new<'a>(items: impl Iterator<Item = ast::ModuleItem<'a>>) -> ModuleScope {
29 let mut entries = Vec::new();
30 for item in items {
31 let entry = match item {
32 ast::ModuleItem::StructDef(item) => Entry::new(item),
33 ast::ModuleItem::EnumDef(item) => Entry::new(item),
34 ast::ModuleItem::FnDef(item) => Entry::new(item),
35 ast::ModuleItem::ConstDef(item) => Entry::new(item),
36 ast::ModuleItem::StaticDef(item) => Entry::new(item),
37 ast::ModuleItem::TraitDef(item) => Entry::new(item),
38 ast::ModuleItem::TypeDef(item) => Entry::new(item),
39 ast::ModuleItem::Module(item) => Entry::new(item),
40 ast::ModuleItem::UseItem(item) => {
41 if let Some(tree) = item.use_tree() {
42 collect_imports(tree, &mut entries);
43 }
44 continue;
45 }
46 ast::ModuleItem::ExternCrateItem(_) | ast::ModuleItem::ImplItem(_) => continue,
47 };
48 entries.extend(entry)
49 }
50
51 ModuleScope { entries }
52 }
53
54 pub fn entries(&self) -> &[Entry] {
55 self.entries.as_slice()
56 }
57}
58
59impl Entry {
60 fn new<'a>(item: impl ast::NameOwner<'a>) -> Option<Entry> {
61 let name = item.name()?;
62 Some(Entry {
63 name: name.text(),
64 ptr: LocalSyntaxPtr::new(name.syntax()),
65 kind: EntryKind::Item,
66 })
67 }
68 fn new_import(path: ast::Path) -> Option<Entry> {
69 let name_ref = path.segment()?.name_ref()?;
70 Some(Entry {
71 name: name_ref.text(),
72 ptr: LocalSyntaxPtr::new(name_ref.syntax()),
73 kind: EntryKind::Import,
74 })
75 }
76 pub fn name(&self) -> &SmolStr {
77 &self.name
78 }
79 pub fn ptr(&self) -> LocalSyntaxPtr {
80 self.ptr
81 }
82}
83
84fn collect_imports(tree: ast::UseTree, acc: &mut Vec<Entry>) {
85 if let Some(use_tree_list) = tree.use_tree_list() {
86 return use_tree_list
87 .use_trees()
88 .for_each(|it| collect_imports(it, acc));
89 }
90 if let Some(path) = tree.path() {
91 acc.extend(Entry::new_import(path));
92 }
93}
94
95#[cfg(test)]
96mod tests {
97 use super::*;
98 use ra_syntax::{ast::ModuleItemOwner, SourceFileNode};
99
100 fn do_check(code: &str, expected: &[&str]) {
101 let file = SourceFileNode::parse(&code);
102 let scope = ModuleScope::new(file.ast().items());
103 let actual = scope.entries.iter().map(|it| it.name()).collect::<Vec<_>>();
104 assert_eq!(expected, actual.as_slice());
105 }
106
107 #[test]
108 fn test_module_scope() {
109 do_check(
110 "
111 struct Foo;
112 enum Bar {}
113 mod baz {}
114 fn quux() {}
115 use x::{
116 y::z,
117 t,
118 };
119 type T = ();
120 ",
121 &["Foo", "Bar", "baz", "quux", "z", "t", "T"],
122 )
123 }
124}
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index 74c248a96..f5cb3550e 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -1,91 +1,33 @@
1use std::{ 1use std::{
2 fmt, 2 fmt,
3 hash::{Hash, Hasher},
4 sync::Arc, 3 sync::Arc,
5}; 4};
6 5
7use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit}; 6use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
8use ra_syntax::{ 7use ra_syntax::{
9 ast::{self, ArgListOwner, Expr, NameOwner}, 8 ast::{self, ArgListOwner, Expr, NameOwner},
10 AstNode, SourceFileNode, SmolStr, 9 AstNode, SourceFileNode,
11 SyntaxKind::*, 10 SyntaxKind::*,
12 SyntaxNodeRef, TextRange, TextUnit, 11 SyntaxNodeRef, TextRange, TextUnit,
13}; 12};
13use ra_db::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE, SyntaxDatabase, SourceFileQuery};
14use rayon::prelude::*; 14use rayon::prelude::*;
15use relative_path::RelativePath;
16use rustc_hash::FxHashSet; 15use rustc_hash::FxHashSet;
17use salsa::{Database, ParallelDatabase}; 16use salsa::{Database, ParallelDatabase};
17use hir::{
18 self,
19 FnSignatureInfo,
20 Problem,
21};
18 22
19use crate::{ 23use crate::{
20 completion::{completions, CompletionItem}, 24 completion::{completions, CompletionItem},
21 db::{self, FileSyntaxQuery, SyntaxDatabase}, 25 db,
22 descriptors::{ 26 symbol_index::{SymbolIndex, SymbolsDatabase},
23 function::{FnDescriptor, FnId}, 27 AnalysisChange, Cancelable, CrateId, Diagnostic, FileId,
24 module::{ModuleSource, ModuleTree, Problem},
25 DeclarationDescriptor, DescriptorDatabase,
26 },
27 input::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE},
28 symbol_index::SymbolIndex,
29 AnalysisChange, Cancelable, CrateGraph, CrateId, Diagnostic, FileId, FileResolver,
30 FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit, 28 FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit,
31}; 29};
32 30
33#[derive(Clone, Debug)]
34pub(crate) struct FileResolverImp {
35 inner: Arc<FileResolver>,
36}
37
38impl PartialEq for FileResolverImp {
39 fn eq(&self, other: &FileResolverImp) -> bool {
40 self.inner() == other.inner()
41 }
42}
43
44impl Eq for FileResolverImp {}
45
46impl Hash for FileResolverImp {
47 fn hash<H: Hasher>(&self, hasher: &mut H) {
48 self.inner().hash(hasher);
49 }
50}
51
52impl FileResolverImp {
53 pub(crate) fn new(inner: Arc<FileResolver>) -> FileResolverImp {
54 FileResolverImp { inner }
55 }
56 pub(crate) fn file_stem(&self, file_id: FileId) -> String {
57 self.inner.file_stem(file_id)
58 }
59 pub(crate) fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
60 self.inner.resolve(file_id, path)
61 }
62 fn inner(&self) -> *const FileResolver {
63 &*self.inner
64 }
65}
66
67impl Default for FileResolverImp {
68 fn default() -> FileResolverImp {
69 #[derive(Debug)]
70 struct DummyResolver;
71 impl FileResolver for DummyResolver {
72 fn file_stem(&self, _file_: FileId) -> String {
73 panic!("file resolver not set")
74 }
75 fn resolve(
76 &self,
77 _file_id: FileId,
78 _path: &::relative_path::RelativePath,
79 ) -> Option<FileId> {
80 panic!("file resolver not set")
81 }
82 }
83 FileResolverImp {
84 inner: Arc::new(DummyResolver),
85 }
86 }
87}
88
89#[derive(Debug, Default)] 31#[derive(Debug, Default)]
90pub(crate) struct AnalysisHostImpl { 32pub(crate) struct AnalysisHostImpl {
91 db: db::RootDatabase, 33 db: db::RootDatabase,
@@ -102,7 +44,7 @@ impl AnalysisHostImpl {
102 44
103 for (file_id, text) in change.files_changed { 45 for (file_id, text) in change.files_changed {
104 self.db 46 self.db
105 .query_mut(crate::input::FileTextQuery) 47 .query_mut(ra_db::FileTextQuery)
106 .set(file_id, Arc::new(text)) 48 .set(file_id, Arc::new(text))
107 } 49 }
108 if !(change.files_added.is_empty() && change.files_removed.is_empty()) { 50 if !(change.files_added.is_empty() && change.files_removed.is_empty()) {
@@ -112,22 +54,22 @@ impl AnalysisHostImpl {
112 let mut source_root = SourceRoot::clone(&self.db.source_root(WORKSPACE)); 54 let mut source_root = SourceRoot::clone(&self.db.source_root(WORKSPACE));
113 for (file_id, text) in change.files_added { 55 for (file_id, text) in change.files_added {
114 self.db 56 self.db
115 .query_mut(crate::input::FileTextQuery) 57 .query_mut(ra_db::FileTextQuery)
116 .set(file_id, Arc::new(text)); 58 .set(file_id, Arc::new(text));
117 self.db 59 self.db
118 .query_mut(crate::input::FileSourceRootQuery) 60 .query_mut(ra_db::FileSourceRootQuery)
119 .set(file_id, crate::input::WORKSPACE); 61 .set(file_id, ra_db::WORKSPACE);
120 source_root.files.insert(file_id); 62 source_root.files.insert(file_id);
121 } 63 }
122 for file_id in change.files_removed { 64 for file_id in change.files_removed {
123 self.db 65 self.db
124 .query_mut(crate::input::FileTextQuery) 66 .query_mut(ra_db::FileTextQuery)
125 .set(file_id, Arc::new(String::new())); 67 .set(file_id, Arc::new(String::new()));
126 source_root.files.remove(&file_id); 68 source_root.files.remove(&file_id);
127 } 69 }
128 source_root.file_resolver = file_resolver; 70 source_root.file_resolver = file_resolver;
129 self.db 71 self.db
130 .query_mut(crate::input::SourceRootQuery) 72 .query_mut(ra_db::SourceRootQuery)
131 .set(WORKSPACE, Arc::new(source_root)) 73 .set(WORKSPACE, Arc::new(source_root))
132 } 74 }
133 if !change.libraries_added.is_empty() { 75 if !change.libraries_added.is_empty() {
@@ -138,11 +80,16 @@ impl AnalysisHostImpl {
138 let mut files = FxHashSet::default(); 80 let mut files = FxHashSet::default();
139 for (file_id, text) in library.files { 81 for (file_id, text) in library.files {
140 files.insert(file_id); 82 files.insert(file_id);
83 log::debug!(
84 "library file: {:?} {:?}",
85 file_id,
86 library.file_resolver.debug_path(file_id)
87 );
141 self.db 88 self.db
142 .query_mut(crate::input::FileSourceRootQuery) 89 .query_mut(ra_db::FileSourceRootQuery)
143 .set_constant(file_id, source_root_id); 90 .set_constant(file_id, source_root_id);
144 self.db 91 self.db
145 .query_mut(crate::input::FileTextQuery) 92 .query_mut(ra_db::FileTextQuery)
146 .set_constant(file_id, Arc::new(text)); 93 .set_constant(file_id, Arc::new(text));
147 } 94 }
148 let source_root = SourceRoot { 95 let source_root = SourceRoot {
@@ -150,19 +97,19 @@ impl AnalysisHostImpl {
150 file_resolver: library.file_resolver, 97 file_resolver: library.file_resolver,
151 }; 98 };
152 self.db 99 self.db
153 .query_mut(crate::input::SourceRootQuery) 100 .query_mut(ra_db::SourceRootQuery)
154 .set(source_root_id, Arc::new(source_root)); 101 .set(source_root_id, Arc::new(source_root));
155 self.db 102 self.db
156 .query_mut(crate::input::LibrarySymbolsQuery) 103 .query_mut(crate::symbol_index::LibrarySymbolsQuery)
157 .set(source_root_id, Arc::new(library.symbol_index)); 104 .set(source_root_id, Arc::new(library.symbol_index));
158 } 105 }
159 self.db 106 self.db
160 .query_mut(crate::input::LibrariesQuery) 107 .query_mut(ra_db::LibrariesQuery)
161 .set((), Arc::new(libraries)); 108 .set((), Arc::new(libraries));
162 } 109 }
163 if let Some(crate_graph) = change.crate_graph { 110 if let Some(crate_graph) = change.crate_graph {
164 self.db 111 self.db
165 .query_mut(crate::input::CrateGraphQuery) 112 .query_mut(ra_db::CrateGraphQuery)
166 .set((), Arc::new(crate_graph)) 113 .set((), Arc::new(crate_graph))
167 } 114 }
168 } 115 }
@@ -181,7 +128,7 @@ impl fmt::Debug for AnalysisImpl {
181 128
182impl AnalysisImpl { 129impl AnalysisImpl {
183 pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode { 130 pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode {
184 self.db.file_syntax(file_id) 131 self.db.source_file(file_id)
185 } 132 }
186 pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { 133 pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
187 self.db.file_lines(file_id) 134 self.db.file_lines(file_id)
@@ -212,59 +159,48 @@ impl AnalysisImpl {
212 .collect() 159 .collect()
213 }; 160 };
214 self.db 161 self.db
215 .query(FileSyntaxQuery) 162 .query(SourceFileQuery)
216 .sweep(salsa::SweepStrategy::default().discard_values()); 163 .sweep(salsa::SweepStrategy::default().discard_values());
217 Ok(query.search(&buf)) 164 Ok(query.search(&buf))
218 } 165 }
219 fn module_tree(&self, file_id: FileId) -> Cancelable<Arc<ModuleTree>> { 166 /// This return `Vec`: a module may be included from several places. We
220 let source_root = self.db.file_source_root(file_id); 167 /// don't handle this case yet though, so the Vec has length at most one.
221 self.db.module_tree(source_root)
222 }
223 pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> { 168 pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> {
224 let module_tree = self.module_tree(position.file_id)?; 169 let descr = match hir::Module::guess_from_position(&*self.db, position)? {
225 let file = self.db.file_syntax(position.file_id); 170 None => return Ok(Vec::new()),
226 let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) 171 Some(it) => it,
227 {
228 Some(m) if !m.has_semi() => ModuleSource::new_inline(position.file_id, m),
229 _ => ModuleSource::SourceFile(position.file_id),
230 }; 172 };
231 173 let (file_id, decl) = match descr.parent_link_source(&*self.db) {
232 let res = module_tree 174 None => return Ok(Vec::new()),
233 .modules_for_source(module_source) 175 Some(it) => it,
234 .into_iter() 176 };
235 .filter_map(|module_id| { 177 let decl = decl.borrowed();
236 let link = module_id.parent_link(&module_tree)?; 178 let decl_name = decl.name().unwrap();
237 let file_id = link.owner(&module_tree).source(&module_tree).file_id(); 179 let sym = FileSymbol {
238 let decl = link.bind_source(&module_tree, &*self.db); 180 name: decl_name.text(),
239 let decl = decl.borrowed(); 181 node_range: decl_name.syntax().range(),
240 182 kind: MODULE,
241 let decl_name = decl.name().unwrap(); 183 };
242 184 Ok(vec![(file_id, sym)])
243 let sym = FileSymbol {
244 name: decl_name.text(),
245 node_range: decl_name.syntax().range(),
246 kind: MODULE,
247 };
248 Some((file_id, sym))
249 })
250 .collect();
251 Ok(res)
252 } 185 }
186 /// Returns `Vec` for the same reason as `parent_module`
253 pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { 187 pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
254 let module_tree = self.module_tree(file_id)?; 188 let descr = match hir::Module::guess_from_file_id(&*self.db, file_id)? {
255 let crate_graph = self.db.crate_graph(); 189 None => return Ok(Vec::new()),
256 let res = module_tree 190 Some(it) => it,
257 .modules_for_source(ModuleSource::SourceFile(file_id)) 191 };
258 .into_iter() 192 let root = descr.crate_root();
259 .map(|it| it.root(&module_tree)) 193 let file_id = root
260 .filter_map(|it| it.source(&module_tree).as_file()) 194 .source()
261 .filter_map(|it| crate_graph.crate_id_for_crate_root(it)) 195 .as_file()
262 .collect(); 196 .expect("root module always has a file as a source");
263 197
264 Ok(res) 198 let crate_graph = self.db.crate_graph();
199 let crate_id = crate_graph.crate_id_for_crate_root(file_id);
200 Ok(crate_id.into_iter().collect())
265 } 201 }
266 pub fn crate_root(&self, crate_id: CrateId) -> FileId { 202 pub fn crate_root(&self, crate_id: CrateId) -> FileId {
267 self.db.crate_graph().crate_roots[&crate_id] 203 self.db.crate_graph().crate_root(crate_id)
268 } 204 }
269 pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { 205 pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> {
270 completions(&self.db, position) 206 completions(&self.db, position)
@@ -273,51 +209,51 @@ impl AnalysisImpl {
273 &self, 209 &self,
274 position: FilePosition, 210 position: FilePosition,
275 ) -> Cancelable<Vec<(FileId, FileSymbol)>> { 211 ) -> Cancelable<Vec<(FileId, FileSymbol)>> {
276 let module_tree = self.module_tree(position.file_id)?; 212 let file = self.db.source_file(position.file_id);
277 let file = self.db.file_syntax(position.file_id);
278 let syntax = file.syntax(); 213 let syntax = file.syntax();
279 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { 214 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
280 // First try to resolve the symbol locally 215 if let Some(fn_descr) =
281 return if let Some((name, range)) = 216 hir::Function::guess_for_name_ref(&*self.db, position.file_id, name_ref)
282 resolve_local_name(&self.db, position.file_id, name_ref)
283 { 217 {
284 let mut vec = vec![]; 218 let scope = fn_descr.scope(&*self.db);
285 vec.push(( 219 // First try to resolve the symbol locally
286 position.file_id, 220 return if let Some(entry) = scope.resolve_local_name(name_ref) {
287 FileSymbol { 221 let mut vec = vec![];
288 name, 222 vec.push((
289 node_range: range, 223 position.file_id,
290 kind: NAME, 224 FileSymbol {
291 }, 225 name: entry.name().clone(),
292 )); 226 node_range: entry.ptr().range(),
293 Ok(vec) 227 kind: NAME,
294 } else { 228 },
295 // If that fails try the index based approach. 229 ));
296 self.index_resolve(name_ref) 230 Ok(vec)
297 }; 231 } else {
232 // If that fails try the index based approach.
233 self.index_resolve(name_ref)
234 };
235 }
298 } 236 }
299 if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { 237 if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
300 if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { 238 if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
301 if module.has_semi() { 239 if module.has_semi() {
302 let file_ids = self.resolve_module(&*module_tree, position.file_id, module); 240 let parent_module =
303 241 hir::Module::guess_from_file_id(&*self.db, position.file_id)?;
304 let res = file_ids 242 let child_name = module.name();
305 .into_iter() 243 match (parent_module, child_name) {
306 .map(|id| { 244 (Some(parent_module), Some(child_name)) => {
307 let name = module 245 if let Some(child) = parent_module.child(&child_name.text()) {
308 .name() 246 let file_id = child.source().file_id();
309 .map(|n| n.text()) 247 let symbol = FileSymbol {
310 .unwrap_or_else(|| SmolStr::new("")); 248 name: child_name.text(),
311 let symbol = FileSymbol { 249 node_range: TextRange::offset_len(0.into(), 0.into()),
312 name, 250 kind: MODULE,
313 node_range: TextRange::offset_len(0.into(), 0.into()), 251 };
314 kind: MODULE, 252 return Ok(vec![(file_id, symbol)]);
315 }; 253 }
316 (id, symbol) 254 }
317 }) 255 _ => (),
318 .collect(); 256 }
319
320 return Ok(res);
321 } 257 }
322 } 258 }
323 } 259 }
@@ -325,32 +261,42 @@ impl AnalysisImpl {
325 } 261 }
326 262
327 pub fn find_all_refs(&self, position: FilePosition) -> Vec<(FileId, TextRange)> { 263 pub fn find_all_refs(&self, position: FilePosition) -> Vec<(FileId, TextRange)> {
328 let file = self.db.file_syntax(position.file_id); 264 let file = self.db.source_file(position.file_id);
329 let syntax = file.syntax();
330
331 // Find the binding associated with the offset 265 // Find the binding associated with the offset
332 let maybe_binding = 266 let (binding, descr) = match find_binding(&self.db, &file, position) {
333 find_node_at_offset::<ast::BindPat>(syntax, position.offset).or_else(|| {
334 let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
335 let resolved = resolve_local_name(&self.db, position.file_id, name_ref)?;
336 find_node_at_offset::<ast::BindPat>(syntax, resolved.1.end())
337 });
338
339 let binding = match maybe_binding {
340 None => return Vec::new(), 267 None => return Vec::new(),
341 Some(it) => it, 268 Some(it) => it,
342 }; 269 };
343 270
344 let decl = DeclarationDescriptor::new(binding); 271 let mut ret = vec![(position.file_id, binding.syntax().range())];
345
346 let mut ret = vec![(position.file_id, decl.range)];
347 ret.extend( 272 ret.extend(
348 decl.find_all_refs() 273 descr
274 .scope(&*self.db)
275 .find_all_refs(binding)
349 .into_iter() 276 .into_iter()
350 .map(|ref_desc| (position.file_id, ref_desc.range)), 277 .map(|ref_desc| (position.file_id, ref_desc.range)),
351 ); 278 );
352 279
353 ret 280 return ret;
281
282 fn find_binding<'a>(
283 db: &db::RootDatabase,
284 source_file: &'a SourceFileNode,
285 position: FilePosition,
286 ) -> Option<(ast::BindPat<'a>, hir::Function)> {
287 let syntax = source_file.syntax();
288 if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
289 let descr = hir::Function::guess_for_bind_pat(db, position.file_id, binding)?;
290 return Some((binding, descr));
291 };
292 let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
293 let descr = hir::Function::guess_for_name_ref(db, position.file_id, name_ref)?;
294 let scope = descr.scope(db);
295 let resolved = scope.resolve_local_name(name_ref)?;
296 let resolved = resolved.ptr().resolve(source_file);
297 let binding = find_node_at_offset::<ast::BindPat>(syntax, resolved.range().end())?;
298 Some((binding, descr))
299 }
354 } 300 }
355 301
356 pub fn doc_comment_for( 302 pub fn doc_comment_for(
@@ -358,14 +304,13 @@ impl AnalysisImpl {
358 file_id: FileId, 304 file_id: FileId,
359 symbol: FileSymbol, 305 symbol: FileSymbol,
360 ) -> Cancelable<Option<String>> { 306 ) -> Cancelable<Option<String>> {
361 let file = self.db.file_syntax(file_id); 307 let file = self.db.source_file(file_id);
362 308
363 Ok(symbol.docs(&file)) 309 Ok(symbol.docs(&file))
364 } 310 }
365 311
366 pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { 312 pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
367 let module_tree = self.module_tree(file_id)?; 313 let syntax = self.db.source_file(file_id);
368 let syntax = self.db.file_syntax(file_id);
369 314
370 let mut res = ra_editor::diagnostics(&syntax) 315 let mut res = ra_editor::diagnostics(&syntax)
371 .into_iter() 316 .into_iter()
@@ -375,8 +320,8 @@ impl AnalysisImpl {
375 fix: None, 320 fix: None,
376 }) 321 })
377 .collect::<Vec<_>>(); 322 .collect::<Vec<_>>();
378 if let Some(m) = module_tree.any_module_for_source(ModuleSource::SourceFile(file_id)) { 323 if let Some(m) = hir::Module::guess_from_file_id(&*self.db, file_id)? {
379 for (name_node, problem) in m.problems(&module_tree, &*self.db) { 324 for (name_node, problem) in m.problems(&*self.db) {
380 let diag = match problem { 325 let diag = match problem {
381 Problem::UnresolvedModule { candidate } => { 326 Problem::UnresolvedModule { candidate } => {
382 let create_file = FileSystemEdit::CreateFile { 327 let create_file = FileSystemEdit::CreateFile {
@@ -452,27 +397,22 @@ impl AnalysisImpl {
452 pub fn resolve_callable( 397 pub fn resolve_callable(
453 &self, 398 &self,
454 position: FilePosition, 399 position: FilePosition,
455 ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { 400 ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> {
456 let file = self.db.file_syntax(position.file_id); 401 let file = self.db.source_file(position.file_id);
457 let syntax = file.syntax(); 402 let syntax = file.syntax();
458 403
459 // Find the calling expression and it's NameRef 404 // Find the calling expression and it's NameRef
460 let calling_node = match FnCallNode::with_node(syntax, position.offset) { 405 let calling_node = ctry!(FnCallNode::with_node(syntax, position.offset));
461 Some(node) => node, 406 let name_ref = ctry!(calling_node.name_ref());
462 None => return Ok(None),
463 };
464 let name_ref = match calling_node.name_ref() {
465 Some(name) => name,
466 None => return Ok(None),
467 };
468 407
469 // Resolve the function's NameRef (NOTE: this isn't entirely accurate). 408 // Resolve the function's NameRef (NOTE: this isn't entirely accurate).
470 let file_symbols = self.index_resolve(name_ref)?; 409 let file_symbols = self.index_resolve(name_ref)?;
471 for (fn_fiel_id, fs) in file_symbols { 410 for (fn_file_id, fs) in file_symbols {
472 if fs.kind == FN_DEF { 411 if fs.kind == FN_DEF {
473 let fn_file = self.db.file_syntax(fn_fiel_id); 412 let fn_file = self.db.source_file(fn_file_id);
474 if let Some(fn_def) = find_node_at_offset(fn_file.syntax(), fs.node_range.start()) { 413 if let Some(fn_def) = find_node_at_offset(fn_file.syntax(), fs.node_range.start()) {
475 if let Some(descriptor) = FnDescriptor::new(fn_def) { 414 let descr = hir::Function::guess_from_source(&*self.db, fn_file_id, fn_def);
415 if let Some(descriptor) = descr.signature_info(&*self.db) {
476 // If we have a calling expression let's find which argument we are on 416 // If we have a calling expression let's find which argument we are on
477 let mut current_parameter = None; 417 let mut current_parameter = None;
478 418
@@ -526,27 +466,6 @@ impl AnalysisImpl {
526 query.limit(4); 466 query.limit(4);
527 self.world_symbols(query) 467 self.world_symbols(query)
528 } 468 }
529
530 fn resolve_module(
531 &self,
532 module_tree: &ModuleTree,
533 file_id: FileId,
534 module: ast::Module,
535 ) -> Vec<FileId> {
536 let name = match module.name() {
537 Some(name) => name.text(),
538 None => return Vec::new(),
539 };
540 let module_id = match module_tree.any_module_for_source(ModuleSource::SourceFile(file_id)) {
541 Some(id) => id,
542 None => return Vec::new(),
543 };
544 module_id
545 .child(module_tree, name.as_str())
546 .and_then(|it| it.source(&module_tree).as_file())
547 .into_iter()
548 .collect()
549 }
550} 469}
551 470
552impl SourceChange { 471impl SourceChange {
@@ -566,16 +485,6 @@ impl SourceChange {
566 } 485 }
567} 486}
568 487
569impl CrateGraph {
570 fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
571 let (&crate_id, _) = self
572 .crate_roots
573 .iter()
574 .find(|(_crate_id, &root_id)| root_id == file_id)?;
575 Some(crate_id)
576 }
577}
578
579enum FnCallNode<'a> { 488enum FnCallNode<'a> {
580 CallExpr(ast::CallExpr<'a>), 489 CallExpr(ast::CallExpr<'a>),
581 MethodCallExpr(ast::MethodCallExpr<'a>), 490 MethodCallExpr(ast::MethodCallExpr<'a>),
@@ -614,16 +523,3 @@ impl<'a> FnCallNode<'a> {
614 } 523 }
615 } 524 }
616} 525}
617
618fn resolve_local_name(
619 db: &db::RootDatabase,
620 file_id: FileId,
621 name_ref: ast::NameRef,
622) -> Option<(SmolStr, TextRange)> {
623 let fn_def = name_ref.syntax().ancestors().find_map(ast::FnDef::cast)?;
624 let fn_id = FnId::new(file_id, fn_def);
625 let scopes = db.fn_scopes(fn_id);
626 let scope_entry = crate::descriptors::function::resolve_local_name(name_ref, &scopes)?;
627 let syntax = db.resolve_syntax_ptr(scope_entry.ptr().into_global(file_id));
628 Some((scope_entry.name().clone(), syntax.range()))
629}
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index ad0273edc..12df580ba 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -1,5 +1,5 @@
1//! ra_analyzer crate is the brain of Rust analyzer. It relies on the `salsa` 1//! ra_analyzer crate is the brain of Rust analyzer. It relies on the `salsa`
2//! crate, which provides and incremental on-deman database of facts. 2//! crate, which provides and incremental on-demand database of facts.
3 3
4extern crate fst; 4extern crate fst;
5extern crate ra_editor; 5extern crate ra_editor;
@@ -9,47 +9,45 @@ extern crate relative_path;
9extern crate rustc_hash; 9extern crate rustc_hash;
10extern crate salsa; 10extern crate salsa;
11 11
12macro_rules! ctry {
13 ($expr:expr) => {
14 match $expr {
15 None => return Ok(None),
16 Some(it) => it,
17 }
18 };
19}
20
12mod db; 21mod db;
13mod input;
14mod imp; 22mod imp;
15mod completion; 23mod completion;
16mod descriptors;
17mod symbol_index; 24mod symbol_index;
18mod syntax_ptr;
19pub mod mock_analysis; 25pub mod mock_analysis;
20 26
21use std::{fmt, sync::Arc}; 27use std::{fmt, sync::Arc};
22 28
23use ra_syntax::{AtomEdit, SourceFileNode, TextRange, TextUnit}; 29use ra_syntax::{AtomEdit, SourceFileNode, TextRange, TextUnit};
30use ra_db::FileResolverImp;
24use rayon::prelude::*; 31use rayon::prelude::*;
25use relative_path::RelativePathBuf; 32use relative_path::RelativePathBuf;
26 33
27use crate::{ 34use crate::{
28 imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp}, 35 imp::{AnalysisHostImpl, AnalysisImpl},
29 symbol_index::SymbolIndex, 36 symbol_index::SymbolIndex,
30}; 37};
31 38
32pub use crate::{ 39pub use crate::{
33 completion::CompletionItem, 40 completion::CompletionItem,
34 descriptors::function::FnDescriptor,
35 input::{CrateGraph, CrateId, FileId, FileResolver},
36}; 41};
37pub use ra_editor::{ 42pub use ra_editor::{
38 FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable, RunnableKind, StructureNode, 43 FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable, RunnableKind, StructureNode,
39}; 44};
45pub use hir::FnSignatureInfo;
40 46
41#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] 47pub use ra_db::{
42pub struct Canceled; 48 Canceled, Cancelable, FilePosition,
43 49 CrateGraph, CrateId, FileId, FileResolver
44pub type Cancelable<T> = Result<T, Canceled>; 50};
45
46impl std::fmt::Display for Canceled {
47 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
48 fmt.write_str("Canceled")
49 }
50}
51
52impl std::error::Error for Canceled {}
53 51
54#[derive(Default)] 52#[derive(Default)]
55pub struct AnalysisChange { 53pub struct AnalysisChange {
@@ -119,12 +117,6 @@ impl AnalysisHost {
119 } 117 }
120} 118}
121 119
122#[derive(Clone, Copy, Debug)]
123pub struct FilePosition {
124 pub file_id: FileId,
125 pub offset: TextUnit,
126}
127
128#[derive(Debug)] 120#[derive(Debug)]
129pub struct SourceChange { 121pub struct SourceChange {
130 pub label: String, 122 pub label: String,
@@ -294,7 +286,7 @@ impl Analysis {
294 pub fn resolve_callable( 286 pub fn resolve_callable(
295 &self, 287 &self,
296 position: FilePosition, 288 position: FilePosition,
297 ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { 289 ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> {
298 self.imp.resolve_callable(position) 290 self.imp.resolve_callable(position)
299 } 291 }
300} 292}
diff --git a/crates/ra_analysis/src/mock_analysis.rs b/crates/ra_analysis/src/mock_analysis.rs
index 8e8f969f4..0d9a7a147 100644
--- a/crates/ra_analysis/src/mock_analysis.rs
+++ b/crates/ra_analysis/src/mock_analysis.rs
@@ -1,9 +1,10 @@
1use std::sync::Arc; 1use std::sync::Arc;
2 2
3use relative_path::{RelativePath, RelativePathBuf}; 3use relative_path::{RelativePathBuf};
4use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; 4use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER};
5use ra_db::mock::FileMap;
5 6
6use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FileResolver, FilePosition}; 7use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FilePosition};
7 8
8/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis 9/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis
9/// from a set of in-memory files. 10/// from a set of in-memory files.
@@ -76,16 +77,15 @@ impl MockAnalysis {
76 } 77 }
77 pub fn analysis_host(self) -> AnalysisHost { 78 pub fn analysis_host(self) -> AnalysisHost {
78 let mut host = AnalysisHost::default(); 79 let mut host = AnalysisHost::default();
79 let mut file_map = Vec::new(); 80 let mut file_map = FileMap::default();
80 let mut change = AnalysisChange::new(); 81 let mut change = AnalysisChange::new();
81 for (id, (path, contents)) in self.files.into_iter().enumerate() { 82 for (path, contents) in self.files.into_iter() {
82 let file_id = FileId((id + 1) as u32);
83 assert!(path.starts_with('/')); 83 assert!(path.starts_with('/'));
84 let path = RelativePathBuf::from_path(&path[1..]).unwrap(); 84 let path = RelativePathBuf::from_path(&path[1..]).unwrap();
85 let file_id = file_map.add(path);
85 change.add_file(file_id, contents); 86 change.add_file(file_id, contents);
86 file_map.push((file_id, path));
87 } 87 }
88 change.set_file_resolver(Arc::new(FileMap(file_map))); 88 change.set_file_resolver(Arc::new(file_map));
89 host.apply_change(change); 89 host.apply_change(change);
90 host 90 host
91 } 91 }
@@ -113,29 +113,3 @@ pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) {
113 let pos = mock.add_file_with_position("/main.rs", code); 113 let pos = mock.add_file_with_position("/main.rs", code);
114 (mock.analysis(), pos) 114 (mock.analysis(), pos)
115} 115}
116
117#[derive(Debug)]
118struct FileMap(Vec<(FileId, RelativePathBuf)>);
119
120impl FileMap {
121 fn iter<'a>(&'a self) -> impl Iterator<Item = (FileId, &'a RelativePath)> + 'a {
122 self.0
123 .iter()
124 .map(|(id, path)| (*id, path.as_relative_path()))
125 }
126
127 fn path(&self, id: FileId) -> &RelativePath {
128 self.iter().find(|&(it, _)| it == id).unwrap().1
129 }
130}
131
132impl FileResolver for FileMap {
133 fn file_stem(&self, id: FileId) -> String {
134 self.path(id).file_stem().unwrap().to_string()
135 }
136 fn resolve(&self, id: FileId, rel: &RelativePath) -> Option<FileId> {
137 let path = self.path(id).join(rel).normalize();
138 let id = self.iter().find(|&(_, p)| path == p)?.0;
139 Some(id)
140 }
141}
diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs
index 3a0667ecd..b48a37229 100644
--- a/crates/ra_analysis/src/symbol_index.rs
+++ b/crates/ra_analysis/src/symbol_index.rs
@@ -4,14 +4,36 @@ use std::{
4}; 4};
5 5
6use fst::{self, Streamer}; 6use fst::{self, Streamer};
7use ra_editor::{file_symbols, FileSymbol}; 7use ra_editor::{self, FileSymbol};
8use ra_syntax::{ 8use ra_syntax::{
9 SourceFileNode, 9 SourceFileNode,
10 SyntaxKind::{self, *}, 10 SyntaxKind::{self, *},
11}; 11};
12use ra_db::{SyntaxDatabase, SourceRootId};
12use rayon::prelude::*; 13use rayon::prelude::*;
13 14
14use crate::{FileId, Query}; 15use crate::{
16 Cancelable,
17 FileId, Query,
18};
19
20salsa::query_group! {
21 pub(crate) trait SymbolsDatabase: SyntaxDatabase {
22 fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
23 type FileSymbolsQuery;
24 }
25 fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> {
26 type LibrarySymbolsQuery;
27 storage input;
28 }
29 }
30}
31
32fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
33 db.check_canceled()?;
34 let syntax = db.source_file(file_id);
35 Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
36}
15 37
16#[derive(Default, Debug)] 38#[derive(Default, Debug)]
17pub(crate) struct SymbolIndex { 39pub(crate) struct SymbolIndex {
@@ -39,7 +61,7 @@ impl SymbolIndex {
39 ) -> SymbolIndex { 61 ) -> SymbolIndex {
40 let mut symbols = files 62 let mut symbols = files
41 .flat_map(|(file_id, file)| { 63 .flat_map(|(file_id, file)| {
42 file_symbols(&file) 64 ra_editor::file_symbols(&file)
43 .into_iter() 65 .into_iter()
44 .map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol))) 66 .map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol)))
45 .collect::<Vec<_>>() 67 .collect::<Vec<_>>()
diff --git a/crates/ra_analysis/src/syntax_ptr.rs b/crates/ra_analysis/src/syntax_ptr.rs
deleted file mode 100644
index 194b94584..000000000
--- a/crates/ra_analysis/src/syntax_ptr.rs
+++ /dev/null
@@ -1,84 +0,0 @@
1use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
2
3use crate::db::SyntaxDatabase;
4use crate::FileId;
5
6pub(crate) fn resolve_syntax_ptr(db: &impl SyntaxDatabase, ptr: SyntaxPtr) -> SyntaxNode {
7 let syntax = db.file_syntax(ptr.file_id);
8 ptr.local.resolve(&syntax)
9}
10
11/// SyntaxPtr is a cheap `Copy` id which identifies a particular syntax node,
12/// without retaining syntax tree in memory. You need to explicitly `resolve`
13/// `SyntaxPtr` to get a `SyntaxNode`
14#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
15pub(crate) struct SyntaxPtr {
16 file_id: FileId,
17 local: LocalSyntaxPtr,
18}
19
20impl SyntaxPtr {
21 pub(crate) fn new(file_id: FileId, node: SyntaxNodeRef) -> SyntaxPtr {
22 let local = LocalSyntaxPtr::new(node);
23 SyntaxPtr { file_id, local }
24 }
25
26 pub(crate) fn file_id(self) -> FileId {
27 self.file_id
28 }
29}
30
31/// A pionter to a syntax node inside a file.
32#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
33pub(crate) struct LocalSyntaxPtr {
34 range: TextRange,
35 kind: SyntaxKind,
36}
37
38impl LocalSyntaxPtr {
39 pub(crate) fn new(node: SyntaxNodeRef) -> LocalSyntaxPtr {
40 LocalSyntaxPtr {
41 range: node.range(),
42 kind: node.kind(),
43 }
44 }
45
46 pub(crate) fn resolve(self, file: &SourceFileNode) -> SyntaxNode {
47 let mut curr = file.syntax();
48 loop {
49 if curr.range() == self.range && curr.kind() == self.kind {
50 return curr.owned();
51 }
52 curr = curr
53 .children()
54 .find(|it| self.range.is_subrange(&it.range()))
55 .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
56 }
57 }
58
59 pub(crate) fn into_global(self, file_id: FileId) -> SyntaxPtr {
60 SyntaxPtr {
61 file_id,
62 local: self,
63 }
64 }
65
66 // Seems unfortunate to expose
67 pub(crate) fn range(self) -> TextRange {
68 self.range
69 }
70}
71
72#[test]
73fn test_local_syntax_ptr() {
74 use ra_syntax::{ast, AstNode};
75 let file = SourceFileNode::parse("struct Foo { f: u32, }");
76 let field = file
77 .syntax()
78 .descendants()
79 .find_map(ast::NamedFieldDef::cast)
80 .unwrap();
81 let ptr = LocalSyntaxPtr::new(field.syntax());
82 let field_syntax = ptr.resolve(&file);
83 assert_eq!(field.syntax(), field_syntax);
84}
diff --git a/crates/ra_analysis/tests/tests.rs b/crates/ra_analysis/tests/tests.rs
index 719c166b5..fbe89f444 100644
--- a/crates/ra_analysis/tests/tests.rs
+++ b/crates/ra_analysis/tests/tests.rs
@@ -10,10 +10,10 @@ use test_utils::assert_eq_dbg;
10 10
11use ra_analysis::{ 11use ra_analysis::{
12 mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis}, 12 mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis},
13 AnalysisChange, CrateGraph, FileId, FnDescriptor, 13 AnalysisChange, CrateGraph, FileId, FnSignatureInfo,
14}; 14};
15 15
16fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) { 16fn get_signature(text: &str) -> (FnSignatureInfo, Option<usize>) {
17 let (analysis, position) = single_file_with_position(text); 17 let (analysis, position) = single_file_with_position(text);
18 analysis.resolve_callable(position).unwrap().unwrap() 18 analysis.resolve_callable(position).unwrap().unwrap()
19} 19}
@@ -126,7 +126,7 @@ fn test_resolve_crate_root() {
126 let mut host = mock.analysis_host(); 126 let mut host = mock.analysis_host();
127 assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); 127 assert!(host.analysis().crate_for(mod_file).unwrap().is_empty());
128 128
129 let mut crate_graph = CrateGraph::new(); 129 let mut crate_graph = CrateGraph::default();
130 let crate_id = crate_graph.add_crate_root(root_file); 130 let crate_id = crate_graph.add_crate_root(root_file);
131 let mut change = AnalysisChange::new(); 131 let mut change = AnalysisChange::new();
132 change.set_crate_graph(crate_graph); 132 change.set_crate_graph(crate_graph);
@@ -447,8 +447,8 @@ fn test_complete_crate_path() {
447 ); 447 );
448 let completions = analysis.completions(position).unwrap().unwrap(); 448 let completions = analysis.completions(position).unwrap().unwrap();
449 assert_eq_dbg( 449 assert_eq_dbg(
450 r#"[CompletionItem { label: "foo", lookup: None, snippet: None }, 450 r#"[CompletionItem { label: "Spam", lookup: None, snippet: None },
451 CompletionItem { label: "Spam", lookup: None, snippet: None }]"#, 451 CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
452 &completions, 452 &completions,
453 ); 453 );
454} 454}
@@ -466,8 +466,8 @@ fn test_complete_crate_path_with_braces() {
466 ); 466 );
467 let completions = analysis.completions(position).unwrap().unwrap(); 467 let completions = analysis.completions(position).unwrap().unwrap();
468 assert_eq_dbg( 468 assert_eq_dbg(
469 r#"[CompletionItem { label: "foo", lookup: None, snippet: None }, 469 r#"[CompletionItem { label: "Spam", lookup: None, snippet: None },
470 CompletionItem { label: "Spam", lookup: None, snippet: None }]"#, 470 CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
471 &completions, 471 &completions,
472 ); 472 );
473} 473}
diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml
new file mode 100644
index 000000000..f316c0ab2
--- /dev/null
+++ b/crates/ra_db/Cargo.toml
@@ -0,0 +1,14 @@
1[package]
2edition = "2018"
3name = "ra_db"
4version = "0.1.0"
5authors = ["Aleksey Kladov <[email protected]>"]
6
7[dependencies]
8relative-path = "0.4.0"
9salsa = "0.8.0"
10rustc-hash = "1.0"
11parking_lot = "0.6.4"
12ra_syntax = { path = "../ra_syntax" }
13ra_editor = { path = "../ra_editor" }
14test_utils = { path = "../test_utils" }
diff --git a/crates/ra_db/src/file_resolver.rs b/crates/ra_db/src/file_resolver.rs
new file mode 100644
index 000000000..f849ac752
--- /dev/null
+++ b/crates/ra_db/src/file_resolver.rs
@@ -0,0 +1,76 @@
1use std::{
2 sync::Arc,
3 hash::{Hash, Hasher},
4 fmt,
5};
6
7use relative_path::RelativePath;
8
9use crate::input::FileId;
10
11pub trait FileResolver: fmt::Debug + Send + Sync + 'static {
12 fn file_stem(&self, file_id: FileId) -> String;
13 fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>;
14 fn debug_path(&self, _1file_id: FileId) -> Option<std::path::PathBuf> {
15 None
16 }
17}
18
19#[derive(Clone, Debug)]
20pub struct FileResolverImp {
21 inner: Arc<FileResolver>,
22}
23
24impl PartialEq for FileResolverImp {
25 fn eq(&self, other: &FileResolverImp) -> bool {
26 self.inner() == other.inner()
27 }
28}
29
30impl Eq for FileResolverImp {}
31
32impl Hash for FileResolverImp {
33 fn hash<H: Hasher>(&self, hasher: &mut H) {
34 self.inner().hash(hasher);
35 }
36}
37
38impl FileResolverImp {
39 pub fn new(inner: Arc<FileResolver>) -> FileResolverImp {
40 FileResolverImp { inner }
41 }
42 pub fn file_stem(&self, file_id: FileId) -> String {
43 self.inner.file_stem(file_id)
44 }
45 pub fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
46 self.inner.resolve(file_id, path)
47 }
48 pub fn debug_path(&self, file_id: FileId) -> Option<std::path::PathBuf> {
49 self.inner.debug_path(file_id)
50 }
51 fn inner(&self) -> *const FileResolver {
52 &*self.inner
53 }
54}
55
56impl Default for FileResolverImp {
57 fn default() -> FileResolverImp {
58 #[derive(Debug)]
59 struct DummyResolver;
60 impl FileResolver for DummyResolver {
61 fn file_stem(&self, _file_: FileId) -> String {
62 panic!("file resolver not set")
63 }
64 fn resolve(
65 &self,
66 _file_id: FileId,
67 _path: &::relative_path::RelativePath,
68 ) -> Option<FileId> {
69 panic!("file resolver not set")
70 }
71 }
72 FileResolverImp {
73 inner: Arc::new(DummyResolver),
74 }
75 }
76}
diff --git a/crates/ra_analysis/src/input.rs b/crates/ra_db/src/input.rs
index ba8a17fd5..9101ac7a8 100644
--- a/crates/ra_analysis/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -1,11 +1,10 @@
1use std::{fmt, sync::Arc}; 1use std::sync::Arc;
2 2
3use relative_path::RelativePath;
4use rustc_hash::FxHashMap; 3use rustc_hash::FxHashMap;
5use rustc_hash::FxHashSet; 4use rustc_hash::FxHashSet;
6use salsa; 5use salsa;
7 6
8use crate::{symbol_index::SymbolIndex, FileResolverImp}; 7use crate::file_resolver::FileResolverImp;
9 8
10#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] 9#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
11pub struct FileId(pub u32); 10pub struct FileId(pub u32);
@@ -19,8 +18,8 @@ pub struct CrateGraph {
19} 18}
20 19
21impl CrateGraph { 20impl CrateGraph {
22 pub fn new() -> CrateGraph { 21 pub fn crate_root(&self, crate_id: CrateId) -> FileId {
23 CrateGraph::default() 22 self.crate_roots[&crate_id]
24 } 23 }
25 pub fn add_crate_root(&mut self, file_id: FileId) -> CrateId { 24 pub fn add_crate_root(&mut self, file_id: FileId) -> CrateId {
26 let crate_id = CrateId(self.crate_roots.len() as u32); 25 let crate_id = CrateId(self.crate_roots.len() as u32);
@@ -28,15 +27,17 @@ impl CrateGraph {
28 assert!(prev.is_none()); 27 assert!(prev.is_none());
29 crate_id 28 crate_id
30 } 29 }
31} 30 pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
32 31 let (&crate_id, _) = self
33pub trait FileResolver: fmt::Debug + Send + Sync + 'static { 32 .crate_roots
34 fn file_stem(&self, file_id: FileId) -> String; 33 .iter()
35 fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>; 34 .find(|(_crate_id, &root_id)| root_id == file_id)?;
35 Some(crate_id)
36 }
36} 37}
37 38
38salsa::query_group! { 39salsa::query_group! {
39 pub(crate) trait FilesDatabase: salsa::Database { 40 pub trait FilesDatabase: salsa::Database {
40 fn file_text(file_id: FileId) -> Arc<String> { 41 fn file_text(file_id: FileId) -> Arc<String> {
41 type FileTextQuery; 42 type FileTextQuery;
42 storage input; 43 storage input;
@@ -53,10 +54,6 @@ salsa::query_group! {
53 type LibrariesQuery; 54 type LibrariesQuery;
54 storage input; 55 storage input;
55 } 56 }
56 fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> {
57 type LibrarySymbolsQuery;
58 storage input;
59 }
60 fn crate_graph() -> Arc<CrateGraph> { 57 fn crate_graph() -> Arc<CrateGraph> {
61 type CrateGraphQuery; 58 type CrateGraphQuery;
62 storage input; 59 storage input;
@@ -65,12 +62,12 @@ salsa::query_group! {
65} 62}
66 63
67#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] 64#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
68pub(crate) struct SourceRootId(pub(crate) u32); 65pub struct SourceRootId(pub u32);
69 66
70#[derive(Default, Clone, Debug, PartialEq, Eq)] 67#[derive(Default, Clone, Debug, PartialEq, Eq)]
71pub(crate) struct SourceRoot { 68pub struct SourceRoot {
72 pub(crate) file_resolver: FileResolverImp, 69 pub file_resolver: FileResolverImp,
73 pub(crate) files: FxHashSet<FileId>, 70 pub files: FxHashSet<FileId>,
74} 71}
75 72
76pub(crate) const WORKSPACE: SourceRootId = SourceRootId(0); 73pub const WORKSPACE: SourceRootId = SourceRootId(0);
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
new file mode 100644
index 000000000..53805aada
--- /dev/null
+++ b/crates/ra_db/src/lib.rs
@@ -0,0 +1,83 @@
1//! ra_db defines basic database traits. Concrete DB is defined by ra_analysis.
2mod syntax_ptr;
3mod file_resolver;
4mod input;
5mod loc2id;
6pub mod mock;
7
8use std::sync::Arc;
9use ra_editor::LineIndex;
10use ra_syntax::{TextUnit, SourceFileNode};
11
12#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
13pub struct Canceled;
14
15pub type Cancelable<T> = Result<T, Canceled>;
16
17impl std::fmt::Display for Canceled {
18 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
19 fmt.write_str("Canceled")
20 }
21}
22
23impl std::error::Error for Canceled {}
24
25pub use crate::{
26 syntax_ptr::LocalSyntaxPtr,
27 file_resolver::{FileResolver, FileResolverImp},
28 input::{
29 FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, WORKSPACE,
30 FileTextQuery, FileSourceRootQuery, SourceRootQuery, LibrariesQuery, CrateGraphQuery,
31 },
32 loc2id::{LocationIntener, NumericId},
33};
34
35#[macro_export]
36macro_rules! impl_numeric_id {
37 ($id:ident) => {
38 impl $crate::NumericId for $id {
39 fn from_u32(id: u32) -> Self {
40 $id(id)
41 }
42 fn to_u32(self) -> u32 {
43 self.0
44 }
45 }
46 };
47}
48
49pub trait BaseDatabase: salsa::Database {
50 fn check_canceled(&self) -> Cancelable<()> {
51 if self.salsa_runtime().is_current_revision_canceled() {
52 Err(Canceled)
53 } else {
54 Ok(())
55 }
56 }
57}
58
59salsa::query_group! {
60 pub trait SyntaxDatabase: crate::input::FilesDatabase + BaseDatabase {
61 fn source_file(file_id: FileId) -> SourceFileNode {
62 type SourceFileQuery;
63 }
64 fn file_lines(file_id: FileId) -> Arc<LineIndex> {
65 type FileLinesQuery;
66 }
67 }
68}
69
70fn source_file(db: &impl SyntaxDatabase, file_id: FileId) -> SourceFileNode {
71 let text = db.file_text(file_id);
72 SourceFileNode::parse(&*text)
73}
74fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
75 let text = db.file_text(file_id);
76 Arc::new(LineIndex::new(&*text))
77}
78
79#[derive(Clone, Copy, Debug)]
80pub struct FilePosition {
81 pub file_id: FileId,
82 pub offset: TextUnit,
83}
diff --git a/crates/ra_db/src/loc2id.rs b/crates/ra_db/src/loc2id.rs
new file mode 100644
index 000000000..69ba43d0f
--- /dev/null
+++ b/crates/ra_db/src/loc2id.rs
@@ -0,0 +1,100 @@
1use parking_lot::Mutex;
2
3use std::hash::Hash;
4
5use rustc_hash::FxHashMap;
6
7/// There are two principle ways to refer to things:
8/// - by their locatinon (module in foo/bar/baz.rs at line 42)
9/// - by their numeric id (module `ModuleId(42)`)
10///
11/// The first one is more powerful (you can actually find the thing in question
12/// by id), but the second one is so much more compact.
13///
14/// `Loc2IdMap` allows us to have a cake an eat it as well: by maintaining a
15/// bidirectional mapping between positional and numeric ids, we can use compact
16/// representation wich still allows us to get the actual item
17#[derive(Debug)]
18struct Loc2IdMap<LOC, ID>
19where
20 ID: NumericId,
21 LOC: Clone + Eq + Hash,
22{
23 loc2id: FxHashMap<LOC, ID>,
24 id2loc: FxHashMap<ID, LOC>,
25}
26
27impl<LOC, ID> Default for Loc2IdMap<LOC, ID>
28where
29 ID: NumericId,
30 LOC: Clone + Eq + Hash,
31{
32 fn default() -> Self {
33 Loc2IdMap {
34 loc2id: FxHashMap::default(),
35 id2loc: FxHashMap::default(),
36 }
37 }
38}
39
40impl<LOC, ID> Loc2IdMap<LOC, ID>
41where
42 ID: NumericId,
43 LOC: Clone + Eq + Hash,
44{
45 pub fn loc2id(&mut self, loc: &LOC) -> ID {
46 match self.loc2id.get(loc) {
47 Some(id) => return id.clone(),
48 None => (),
49 }
50 let id = self.loc2id.len();
51 assert!(id < u32::max_value() as usize);
52 let id = ID::from_u32(id as u32);
53 self.loc2id.insert(loc.clone(), id.clone());
54 self.id2loc.insert(id.clone(), loc.clone());
55 id
56 }
57
58 pub fn id2loc(&self, id: ID) -> LOC {
59 self.id2loc[&id].clone()
60 }
61}
62
63pub trait NumericId: Clone + Eq + Hash {
64 fn from_u32(id: u32) -> Self;
65 fn to_u32(self) -> u32;
66}
67
68#[derive(Debug)]
69pub struct LocationIntener<LOC, ID>
70where
71 ID: NumericId,
72 LOC: Clone + Eq + Hash,
73{
74 map: Mutex<Loc2IdMap<LOC, ID>>,
75}
76
77impl<LOC, ID> Default for LocationIntener<LOC, ID>
78where
79 ID: NumericId,
80 LOC: Clone + Eq + Hash,
81{
82 fn default() -> Self {
83 LocationIntener {
84 map: Default::default(),
85 }
86 }
87}
88
89impl<LOC, ID> LocationIntener<LOC, ID>
90where
91 ID: NumericId,
92 LOC: Clone + Eq + Hash,
93{
94 pub fn loc2id(&self, loc: &LOC) -> ID {
95 self.map.lock().loc2id(loc)
96 }
97 pub fn id2loc(&self, id: ID) -> LOC {
98 self.map.lock().id2loc(id)
99 }
100}
diff --git a/crates/ra_db/src/mock.rs b/crates/ra_db/src/mock.rs
new file mode 100644
index 000000000..2840f9655
--- /dev/null
+++ b/crates/ra_db/src/mock.rs
@@ -0,0 +1,51 @@
1use std::sync::Arc;
2
3use rustc_hash::FxHashSet;
4use relative_path::{RelativePath, RelativePathBuf};
5
6use crate::{FileId, FileResolver, SourceRoot, FileResolverImp};
7
8#[derive(Default, Debug)]
9pub struct FileMap(Vec<(FileId, RelativePathBuf)>);
10
11impl FileMap {
12 pub fn add(&mut self, path: RelativePathBuf) -> FileId {
13 let file_id = FileId((self.0.len() + 1) as u32);
14 self.0.push((file_id, path));
15 file_id
16 }
17
18 pub fn into_source_root(self) -> SourceRoot {
19 let files = self.files();
20 let file_resolver = FileResolverImp::new(Arc::new(self));
21 SourceRoot {
22 file_resolver,
23 files,
24 }
25 }
26
27 pub fn files(&self) -> FxHashSet<FileId> {
28 self.iter().map(|(id, _)| id).collect()
29 }
30
31 fn iter<'a>(&'a self) -> impl Iterator<Item = (FileId, &'a RelativePath)> + 'a {
32 self.0
33 .iter()
34 .map(|(id, path)| (*id, path.as_relative_path()))
35 }
36
37 fn path(&self, id: FileId) -> &RelativePath {
38 self.iter().find(|&(it, _)| it == id).unwrap().1
39 }
40}
41
42impl FileResolver for FileMap {
43 fn file_stem(&self, id: FileId) -> String {
44 self.path(id).file_stem().unwrap().to_string()
45 }
46 fn resolve(&self, id: FileId, rel: &RelativePath) -> Option<FileId> {
47 let path = self.path(id).join(rel).normalize();
48 let id = self.iter().find(|&(_, p)| path == p)?.0;
49 Some(id)
50 }
51}
diff --git a/crates/ra_db/src/syntax_ptr.rs b/crates/ra_db/src/syntax_ptr.rs
new file mode 100644
index 000000000..dac94dd36
--- /dev/null
+++ b/crates/ra_db/src/syntax_ptr.rs
@@ -0,0 +1,48 @@
1use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
2
3/// A pointer to a syntax node inside a file.
4#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
5pub struct LocalSyntaxPtr {
6 range: TextRange,
7 kind: SyntaxKind,
8}
9
10impl LocalSyntaxPtr {
11 pub fn new(node: SyntaxNodeRef) -> LocalSyntaxPtr {
12 LocalSyntaxPtr {
13 range: node.range(),
14 kind: node.kind(),
15 }
16 }
17
18 pub fn resolve(self, file: &SourceFileNode) -> SyntaxNode {
19 let mut curr = file.syntax();
20 loop {
21 if curr.range() == self.range && curr.kind() == self.kind {
22 return curr.owned();
23 }
24 curr = curr
25 .children()
26 .find(|it| self.range.is_subrange(&it.range()))
27 .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
28 }
29 }
30
31 pub fn range(self) -> TextRange {
32 self.range
33 }
34}
35
36#[test]
37fn test_local_syntax_ptr() {
38 use ra_syntax::{ast, AstNode};
39 let file = SourceFileNode::parse("struct Foo { f: u32, }");
40 let field = file
41 .syntax()
42 .descendants()
43 .find_map(ast::NamedFieldDef::cast)
44 .unwrap();
45 let ptr = LocalSyntaxPtr::new(field.syntax());
46 let field_syntax = ptr.resolve(&file);
47 assert_eq!(field.syntax(), field_syntax);
48}
diff --git a/crates/ra_editor/src/code_actions.rs b/crates/ra_editor/src/code_actions.rs
index bc0e120d3..6979251d1 100644
--- a/crates/ra_editor/src/code_actions.rs
+++ b/crates/ra_editor/src/code_actions.rs
@@ -4,7 +4,7 @@ use ra_syntax::{
4 algo::{find_covering_node, find_leaf_at_offset}, 4 algo::{find_covering_node, find_leaf_at_offset},
5 ast::{self, AstNode, AttrsOwner, NameOwner, TypeParamsOwner}, 5 ast::{self, AstNode, AttrsOwner, NameOwner, TypeParamsOwner},
6 Direction, SourceFileNode, 6 Direction, SourceFileNode,
7 SyntaxKind::{COMMA, WHITESPACE}, 7 SyntaxKind::{COMMA, WHITESPACE, COMMENT},
8 SyntaxNodeRef, TextRange, TextUnit, 8 SyntaxNodeRef, TextRange, TextUnit,
9}; 9};
10 10
@@ -41,7 +41,8 @@ pub fn add_derive<'a>(
41 offset: TextUnit, 41 offset: TextUnit,
42) -> Option<impl FnOnce() -> LocalEdit + 'a> { 42) -> Option<impl FnOnce() -> LocalEdit + 'a> {
43 let nominal = find_node_at_offset::<ast::NominalDef>(file.syntax(), offset)?; 43 let nominal = find_node_at_offset::<ast::NominalDef>(file.syntax(), offset)?;
44 Some(move || { 44 let node_start = derive_insertion_offset(nominal)?;
45 return Some(move || {
45 let derive_attr = nominal 46 let derive_attr = nominal
46 .attrs() 47 .attrs()
47 .filter_map(|x| x.as_call()) 48 .filter_map(|x| x.as_call())
@@ -51,7 +52,6 @@ pub fn add_derive<'a>(
51 let mut edit = EditBuilder::new(); 52 let mut edit = EditBuilder::new();
52 let offset = match derive_attr { 53 let offset = match derive_attr {
53 None => { 54 None => {
54 let node_start = nominal.syntax().range().start();
55 edit.insert(node_start, "#[derive()]\n".to_string()); 55 edit.insert(node_start, "#[derive()]\n".to_string());
56 node_start + TextUnit::of_str("#[derive(") 56 node_start + TextUnit::of_str("#[derive(")
57 } 57 }
@@ -61,7 +61,16 @@ pub fn add_derive<'a>(
61 edit: edit.finish(), 61 edit: edit.finish(),
62 cursor_position: Some(offset), 62 cursor_position: Some(offset),
63 } 63 }
64 }) 64 });
65
66 // Insert `derive` after doc comments.
67 fn derive_insertion_offset(nominal: ast::NominalDef) -> Option<TextUnit> {
68 let non_ws_child = nominal
69 .syntax()
70 .children()
71 .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
72 Some(non_ws_child.range().start())
73 }
65} 74}
66 75
67pub fn add_impl<'a>( 76pub fn add_impl<'a>(
@@ -186,7 +195,7 @@ mod tests {
186 } 195 }
187 196
188 #[test] 197 #[test]
189 fn test_add_derive() { 198 fn add_derive_new() {
190 check_action( 199 check_action(
191 "struct Foo { a: i32, <|>}", 200 "struct Foo { a: i32, <|>}",
192 "#[derive(<|>)]\nstruct Foo { a: i32, }", 201 "#[derive(<|>)]\nstruct Foo { a: i32, }",
@@ -197,6 +206,10 @@ mod tests {
197 "#[derive(<|>)]\nstruct Foo { a: i32, }", 206 "#[derive(<|>)]\nstruct Foo { a: i32, }",
198 |file, off| add_derive(file, off).map(|f| f()), 207 |file, off| add_derive(file, off).map(|f| f()),
199 ); 208 );
209 }
210
211 #[test]
212 fn add_derive_existing() {
200 check_action( 213 check_action(
201 "#[derive(Clone)]\nstruct Foo { a: i32<|>, }", 214 "#[derive(Clone)]\nstruct Foo { a: i32<|>, }",
202 "#[derive(Clone<|>)]\nstruct Foo { a: i32, }", 215 "#[derive(Clone<|>)]\nstruct Foo { a: i32, }",
@@ -205,6 +218,24 @@ mod tests {
205 } 218 }
206 219
207 #[test] 220 #[test]
221 fn add_derive_new_with_doc_comment() {
222 check_action(
223 "
224/// `Foo` is a pretty important struct.
225/// It does stuff.
226struct Foo { a: i32<|>, }
227 ",
228 "
229/// `Foo` is a pretty important struct.
230/// It does stuff.
231#[derive(<|>)]
232struct Foo { a: i32, }
233 ",
234 |file, off| add_derive(file, off).map(|f| f()),
235 );
236 }
237
238 #[test]
208 fn test_add_impl() { 239 fn test_add_impl() {
209 check_action( 240 check_action(
210 "struct Foo {<|>}\n", 241 "struct Foo {<|>}\n",
diff --git a/crates/ra_editor/src/lib.rs b/crates/ra_editor/src/lib.rs
index ff4e8303d..c6b116159 100644
--- a/crates/ra_editor/src/lib.rs
+++ b/crates/ra_editor/src/lib.rs
@@ -148,12 +148,7 @@ pub fn find_node_at_offset<'a, N: AstNode<'a>>(
148 syntax: SyntaxNodeRef<'a>, 148 syntax: SyntaxNodeRef<'a>,
149 offset: TextUnit, 149 offset: TextUnit,
150) -> Option<N> { 150) -> Option<N> {
151 let leaves = find_leaf_at_offset(syntax, offset); 151 find_leaf_at_offset(syntax, offset).find_map(|leaf| leaf.ancestors().find_map(N::cast))
152 let leaf = leaves
153 .clone()
154 .find(|leaf| !leaf.kind().is_trivia())
155 .or_else(|| leaves.right_biased())?;
156 leaf.ancestors().filter_map(N::cast).next()
157} 152}
158 153
159#[cfg(test)] 154#[cfg(test)]
diff --git a/crates/ra_editor/src/line_index.rs b/crates/ra_editor/src/line_index.rs
index 9abbb0d09..aab7e4081 100644
--- a/crates/ra_editor/src/line_index.rs
+++ b/crates/ra_editor/src/line_index.rs
@@ -1,43 +1,124 @@
1use crate::TextUnit; 1use crate::TextUnit;
2use rustc_hash::FxHashMap;
2use superslice::Ext; 3use superslice::Ext;
3 4
4#[derive(Clone, Debug, Hash, PartialEq, Eq)] 5#[derive(Clone, Debug, PartialEq, Eq)]
5pub struct LineIndex { 6pub struct LineIndex {
6 newlines: Vec<TextUnit>, 7 newlines: Vec<TextUnit>,
8 utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
7} 9}
8 10
9#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] 11#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
10pub struct LineCol { 12pub struct LineCol {
11 pub line: u32, 13 pub line: u32,
12 pub col: TextUnit, 14 pub col_utf16: u32,
15}
16
17#[derive(Clone, Debug, Hash, PartialEq, Eq)]
18struct Utf16Char {
19 start: TextUnit,
20 end: TextUnit,
21}
22
23impl Utf16Char {
24 fn len(&self) -> TextUnit {
25 self.end - self.start
26 }
13} 27}
14 28
15impl LineIndex { 29impl LineIndex {
16 pub fn new(text: &str) -> LineIndex { 30 pub fn new(text: &str) -> LineIndex {
31 let mut utf16_lines = FxHashMap::default();
32 let mut utf16_chars = Vec::new();
33
17 let mut newlines = vec![0.into()]; 34 let mut newlines = vec![0.into()];
18 let mut curr = 0.into(); 35 let mut curr_row = 0.into();
36 let mut curr_col = 0.into();
37 let mut line = 0;
19 for c in text.chars() { 38 for c in text.chars() {
20 curr += TextUnit::of_char(c); 39 curr_row += TextUnit::of_char(c);
21 if c == '\n' { 40 if c == '\n' {
22 newlines.push(curr); 41 newlines.push(curr_row);
42
43 // Save any utf-16 characters seen in the previous line
44 if utf16_chars.len() > 0 {
45 utf16_lines.insert(line, utf16_chars);
46 utf16_chars = Vec::new();
47 }
48
49 // Prepare for processing the next line
50 curr_col = 0.into();
51 line += 1;
52 continue;
23 } 53 }
54
55 let char_len = TextUnit::of_char(c);
56 if char_len.to_usize() > 1 {
57 utf16_chars.push(Utf16Char {
58 start: curr_col,
59 end: curr_col + char_len,
60 });
61 }
62
63 curr_col += char_len;
64 }
65 LineIndex {
66 newlines,
67 utf16_lines,
24 } 68 }
25 LineIndex { newlines }
26 } 69 }
27 70
28 pub fn line_col(&self, offset: TextUnit) -> LineCol { 71 pub fn line_col(&self, offset: TextUnit) -> LineCol {
29 let line = self.newlines.upper_bound(&offset) - 1; 72 let line = self.newlines.upper_bound(&offset) - 1;
30 let line_start_offset = self.newlines[line]; 73 let line_start_offset = self.newlines[line];
31 let col = offset - line_start_offset; 74 let col = offset - line_start_offset;
75
32 LineCol { 76 LineCol {
33 line: line as u32, 77 line: line as u32,
34 col, 78 col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32,
35 } 79 }
36 } 80 }
37 81
38 pub fn offset(&self, line_col: LineCol) -> TextUnit { 82 pub fn offset(&self, line_col: LineCol) -> TextUnit {
39 //TODO: return Result 83 //TODO: return Result
40 self.newlines[line_col.line as usize] + line_col.col 84 let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16);
85 self.newlines[line_col.line as usize] + col
86 }
87
88 fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize {
89 if let Some(utf16_chars) = self.utf16_lines.get(&line) {
90 let mut correction = TextUnit::from_usize(0);
91 for c in utf16_chars {
92 if col >= c.end {
93 correction += c.len() - TextUnit::from_usize(1);
94 } else {
95 // From here on, all utf16 characters come *after* the character we are mapping,
96 // so we don't need to take them into account
97 break;
98 }
99 }
100
101 col -= correction;
102 }
103
104 col.to_usize()
105 }
106
107 fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit {
108 let mut col: TextUnit = col.into();
109 if let Some(utf16_chars) = self.utf16_lines.get(&line) {
110 for c in utf16_chars {
111 if col >= c.start {
112 col += c.len() - TextUnit::from_usize(1);
113 } else {
114 // From here on, all utf16 characters come *after* the character we are mapping,
115 // so we don't need to take them into account
116 break;
117 }
118 }
119 }
120
121 col
41 } 122 }
42} 123}
43 124
@@ -49,63 +130,63 @@ fn test_line_index() {
49 index.line_col(0.into()), 130 index.line_col(0.into()),
50 LineCol { 131 LineCol {
51 line: 0, 132 line: 0,
52 col: 0.into() 133 col_utf16: 0
53 } 134 }
54 ); 135 );
55 assert_eq!( 136 assert_eq!(
56 index.line_col(1.into()), 137 index.line_col(1.into()),
57 LineCol { 138 LineCol {
58 line: 0, 139 line: 0,
59 col: 1.into() 140 col_utf16: 1
60 } 141 }
61 ); 142 );
62 assert_eq!( 143 assert_eq!(
63 index.line_col(5.into()), 144 index.line_col(5.into()),
64 LineCol { 145 LineCol {
65 line: 0, 146 line: 0,
66 col: 5.into() 147 col_utf16: 5
67 } 148 }
68 ); 149 );
69 assert_eq!( 150 assert_eq!(
70 index.line_col(6.into()), 151 index.line_col(6.into()),
71 LineCol { 152 LineCol {
72 line: 1, 153 line: 1,
73 col: 0.into() 154 col_utf16: 0
74 } 155 }
75 ); 156 );
76 assert_eq!( 157 assert_eq!(
77 index.line_col(7.into()), 158 index.line_col(7.into()),
78 LineCol { 159 LineCol {
79 line: 1, 160 line: 1,
80 col: 1.into() 161 col_utf16: 1
81 } 162 }
82 ); 163 );
83 assert_eq!( 164 assert_eq!(
84 index.line_col(8.into()), 165 index.line_col(8.into()),
85 LineCol { 166 LineCol {
86 line: 1, 167 line: 1,
87 col: 2.into() 168 col_utf16: 2
88 } 169 }
89 ); 170 );
90 assert_eq!( 171 assert_eq!(
91 index.line_col(10.into()), 172 index.line_col(10.into()),
92 LineCol { 173 LineCol {
93 line: 1, 174 line: 1,
94 col: 4.into() 175 col_utf16: 4
95 } 176 }
96 ); 177 );
97 assert_eq!( 178 assert_eq!(
98 index.line_col(11.into()), 179 index.line_col(11.into()),
99 LineCol { 180 LineCol {
100 line: 1, 181 line: 1,
101 col: 5.into() 182 col_utf16: 5
102 } 183 }
103 ); 184 );
104 assert_eq!( 185 assert_eq!(
105 index.line_col(12.into()), 186 index.line_col(12.into()),
106 LineCol { 187 LineCol {
107 line: 1, 188 line: 1,
108 col: 6.into() 189 col_utf16: 6
109 } 190 }
110 ); 191 );
111 192
@@ -115,35 +196,129 @@ fn test_line_index() {
115 index.line_col(0.into()), 196 index.line_col(0.into()),
116 LineCol { 197 LineCol {
117 line: 0, 198 line: 0,
118 col: 0.into() 199 col_utf16: 0
119 } 200 }
120 ); 201 );
121 assert_eq!( 202 assert_eq!(
122 index.line_col(1.into()), 203 index.line_col(1.into()),
123 LineCol { 204 LineCol {
124 line: 1, 205 line: 1,
125 col: 0.into() 206 col_utf16: 0
126 } 207 }
127 ); 208 );
128 assert_eq!( 209 assert_eq!(
129 index.line_col(2.into()), 210 index.line_col(2.into()),
130 LineCol { 211 LineCol {
131 line: 1, 212 line: 1,
132 col: 1.into() 213 col_utf16: 1
133 } 214 }
134 ); 215 );
135 assert_eq!( 216 assert_eq!(
136 index.line_col(6.into()), 217 index.line_col(6.into()),
137 LineCol { 218 LineCol {
138 line: 1, 219 line: 1,
139 col: 5.into() 220 col_utf16: 5
140 } 221 }
141 ); 222 );
142 assert_eq!( 223 assert_eq!(
143 index.line_col(7.into()), 224 index.line_col(7.into()),
144 LineCol { 225 LineCol {
145 line: 2, 226 line: 2,
146 col: 0.into() 227 col_utf16: 0
147 } 228 }
148 ); 229 );
149} 230}
231
232#[cfg(test)]
233mod test_utf8_utf16_conv {
234 use super::*;
235
236 #[test]
237 fn test_char_len() {
238 assert_eq!('メ'.len_utf8(), 3);
239 assert_eq!('メ'.len_utf16(), 1);
240 }
241
242 #[test]
243 fn test_empty_index() {
244 let col_index = LineIndex::new(
245 "
246const C: char = 'x';
247",
248 );
249 assert_eq!(col_index.utf16_lines.len(), 0);
250 }
251
252 #[test]
253 fn test_single_char() {
254 let col_index = LineIndex::new(
255 "
256const C: char = 'メ';
257",
258 );
259
260 assert_eq!(col_index.utf16_lines.len(), 1);
261 assert_eq!(col_index.utf16_lines[&1].len(), 1);
262 assert_eq!(
263 col_index.utf16_lines[&1][0],
264 Utf16Char {
265 start: 17.into(),
266 end: 20.into()
267 }
268 );
269
270 // UTF-8 to UTF-16, no changes
271 assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
272
273 // UTF-8 to UTF-16
274 assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
275
276 // UTF-16 to UTF-8, no changes
277 assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from(15));
278
279 // UTF-16 to UTF-8
280 assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from(21));
281 }
282
283 #[test]
284 fn test_string() {
285 let col_index = LineIndex::new(
286 "
287const C: char = \"メ メ\";
288",
289 );
290
291 assert_eq!(col_index.utf16_lines.len(), 1);
292 assert_eq!(col_index.utf16_lines[&1].len(), 2);
293 assert_eq!(
294 col_index.utf16_lines[&1][0],
295 Utf16Char {
296 start: 17.into(),
297 end: 20.into()
298 }
299 );
300 assert_eq!(
301 col_index.utf16_lines[&1][1],
302 Utf16Char {
303 start: 21.into(),
304 end: 24.into()
305 }
306 );
307
308 // UTF-8 to UTF-16
309 assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
310
311 assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
312 assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);
313
314 assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
315
316 // UTF-16 to UTF-8
317 assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15));
318
319 assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20));
320 assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23));
321
322 assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15));
323 }
324}
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
new file mode 100644
index 000000000..1b9e148b2
--- /dev/null
+++ b/crates/ra_hir/Cargo.toml
@@ -0,0 +1,17 @@
1[package]
2edition = "2018"
3name = "ra_hir"
4version = "0.1.0"
5authors = ["Aleksey Kladov <[email protected]>"]
6
7[dependencies]
8log = "0.4.5"
9relative-path = "0.4.0"
10salsa = "0.8.0"
11rustc-hash = "1.0"
12parking_lot = "0.6.4"
13id-arena = "2.0"
14ra_syntax = { path = "../ra_syntax" }
15ra_editor = { path = "../ra_editor" }
16ra_db = { path = "../ra_db" }
17test_utils = { path = "../test_utils" }
diff --git a/crates/ra_hir/src/arena.rs b/crates/ra_hir/src/arena.rs
new file mode 100644
index 000000000..d4f9d9cb9
--- /dev/null
+++ b/crates/ra_hir/src/arena.rs
@@ -0,0 +1,66 @@
1//! A simple id-based arena, similar to https://github.com/fitzgen/id-arena.
2//! We use our own version for more compact ids and to allow inherent impls
3//! on Ids.
4
5use std::{
6 fmt,
7 hash::{Hash, Hasher},
8 marker::PhantomData,
9};
10
11pub struct Id<T> {
12 idx: u32,
13 _ty: PhantomData<fn() -> T>,
14}
15
16impl<T> fmt::Debug for Id<T> {
17 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
18 f.debug_tuple("Id").field(&self.idx).finish()
19 }
20}
21impl<T> Copy for Id<T> {}
22impl<T> Clone for Id<T> {
23 fn clone(&self) -> Id<T> {
24 *self
25 }
26}
27
28impl<T> PartialEq for Id<T> {
29 fn eq(&self, other: &Id<T>) -> bool {
30 self.idx == other.idx
31 }
32}
33
34impl<T> Eq for Id<T> {}
35
36impl<T> Hash for Id<T> {
37 fn hash<H: Hasher>(&self, h: &mut H) {
38 self.idx.hash(h);
39 }
40}
41
42#[derive(Debug, PartialEq, Eq)]
43pub(crate) struct ArenaBehavior<T> {
44 _ty: PhantomData<T>,
45}
46
47impl<T> id_arena::ArenaBehavior for ArenaBehavior<T> {
48 type Id = Id<T>;
49 fn new_arena_id() -> u32 {
50 0
51 }
52 fn new_id(_arena_id: u32, index: usize) -> Id<T> {
53 Id {
54 idx: index as u32,
55 _ty: PhantomData,
56 }
57 }
58 fn index(id: Id<T>) -> usize {
59 id.idx as usize
60 }
61 fn arena_id(_id: Id<T>) -> u32 {
62 0
63 }
64}
65
66pub(crate) type Arena<T> = id_arena::Arena<T, ArenaBehavior<T>>;
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
new file mode 100644
index 000000000..2f01bae6d
--- /dev/null
+++ b/crates/ra_hir/src/db.rs
@@ -0,0 +1,66 @@
1use std::sync::Arc;
2
3use ra_syntax::{
4 SyntaxNode,
5 ast::FnDefNode,
6};
7use ra_db::{SourceRootId, LocationIntener, SyntaxDatabase, FileId, Cancelable};
8
9use crate::{
10 DefLoc, DefId, FnId,
11 SourceFileItems, SourceItemId,
12 query_definitions,
13 FnScopes,
14 module::{ModuleId, ModuleTree, ModuleSource,
15 nameres::{ItemMap, InputModuleItems}},
16};
17
18salsa::query_group! {
19
20pub trait HirDatabase: SyntaxDatabase
21 + AsRef<LocationIntener<DefLoc, DefId>>
22 + AsRef<LocationIntener<SourceItemId, FnId>>
23{
24 fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> {
25 type FnScopesQuery;
26 use fn query_definitions::fn_scopes;
27 }
28 fn fn_syntax(fn_id: FnId) -> FnDefNode {
29 type FnSyntaxQuery;
30 // Don't retain syntax trees in memory
31 storage dependencies;
32 use fn query_definitions::fn_syntax;
33 }
34
35 fn file_items(file_id: FileId) -> Arc<SourceFileItems> {
36 type SourceFileItemsQuery;
37 storage dependencies;
38 use fn query_definitions::file_items;
39 }
40
41 fn file_item(source_item_id: SourceItemId) -> SyntaxNode {
42 type FileItemQuery;
43 storage dependencies;
44 use fn query_definitions::file_item;
45 }
46
47 fn submodules(source: ModuleSource) -> Cancelable<Arc<Vec<crate::module::imp::Submodule>>> {
48 type SubmodulesQuery;
49 use fn query_definitions::submodules;
50 }
51
52 fn input_module_items(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<InputModuleItems>> {
53 type InputModuleItemsQuery;
54 use fn query_definitions::input_module_items;
55 }
56 fn item_map(source_root_id: SourceRootId) -> Cancelable<Arc<ItemMap>> {
57 type ItemMapQuery;
58 use fn query_definitions::item_map;
59 }
60 fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
61 type ModuleTreeQuery;
62 use fn crate::module::imp::module_tree;
63 }
64}
65
66}
diff --git a/crates/ra_analysis/src/descriptors/function/mod.rs b/crates/ra_hir/src/function/mod.rs
index d5db28a64..c8af2e54f 100644
--- a/crates/ra_analysis/src/descriptors/function/mod.rs
+++ b/crates/ra_hir/src/function/mod.rs
@@ -1,29 +1,83 @@
1pub(super) mod imp;
2mod scope; 1mod scope;
3 2
4use std::cmp::{max, min}; 3use std::{
4 cmp::{max, min},
5 sync::Arc,
6};
5 7
6use ra_syntax::{ 8use ra_syntax::{
9 TextRange, TextUnit, SyntaxNodeRef,
7 ast::{self, AstNode, DocCommentsOwner, NameOwner}, 10 ast::{self, AstNode, DocCommentsOwner, NameOwner},
8 TextRange, TextUnit,
9}; 11};
12use ra_db::FileId;
10 13
11use crate::{syntax_ptr::SyntaxPtr, FileId}; 14use crate::{
12 15 FnId, HirDatabase, SourceItemId,
13pub(crate) use self::scope::{resolve_local_name, FnScopes}; 16};
14 17
15#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 18pub use self::scope::FnScopes;
16pub(crate) struct FnId(SyntaxPtr);
17 19
18impl FnId { 20impl FnId {
19 pub(crate) fn new(file_id: FileId, fn_def: ast::FnDef) -> FnId { 21 pub fn get(db: &impl HirDatabase, file_id: FileId, fn_def: ast::FnDef) -> FnId {
20 let ptr = SyntaxPtr::new(file_id, fn_def.syntax()); 22 let file_items = db.file_items(file_id);
21 FnId(ptr) 23 let item_id = file_items.id_of(fn_def.syntax());
24 let item_id = SourceItemId { file_id, item_id };
25 FnId::from_loc(db, &item_id)
26 }
27}
28
29pub struct Function {
30 fn_id: FnId,
31}
32
33impl Function {
34 pub fn guess_from_source(
35 db: &impl HirDatabase,
36 file_id: FileId,
37 fn_def: ast::FnDef,
38 ) -> Function {
39 let fn_id = FnId::get(db, file_id, fn_def);
40 Function { fn_id }
41 }
42
43 pub fn guess_for_name_ref(
44 db: &impl HirDatabase,
45 file_id: FileId,
46 name_ref: ast::NameRef,
47 ) -> Option<Function> {
48 Function::guess_for_node(db, file_id, name_ref.syntax())
49 }
50
51 pub fn guess_for_bind_pat(
52 db: &impl HirDatabase,
53 file_id: FileId,
54 bind_pat: ast::BindPat,
55 ) -> Option<Function> {
56 Function::guess_for_node(db, file_id, bind_pat.syntax())
57 }
58
59 fn guess_for_node(
60 db: &impl HirDatabase,
61 file_id: FileId,
62 node: SyntaxNodeRef,
63 ) -> Option<Function> {
64 let fn_def = node.ancestors().find_map(ast::FnDef::cast)?;
65 let res = Function::guess_from_source(db, file_id, fn_def);
66 Some(res)
67 }
68
69 pub fn scope(&self, db: &impl HirDatabase) -> Arc<FnScopes> {
70 db.fn_scopes(self.fn_id)
71 }
72
73 pub fn signature_info(&self, db: &impl HirDatabase) -> Option<FnSignatureInfo> {
74 let syntax = db.fn_syntax(self.fn_id);
75 FnSignatureInfo::new(syntax.borrowed())
22 } 76 }
23} 77}
24 78
25#[derive(Debug, Clone)] 79#[derive(Debug, Clone)]
26pub struct FnDescriptor { 80pub struct FnSignatureInfo {
27 pub name: String, 81 pub name: String,
28 pub label: String, 82 pub label: String,
29 pub ret_type: Option<String>, 83 pub ret_type: Option<String>,
@@ -31,8 +85,8 @@ pub struct FnDescriptor {
31 pub doc: Option<String>, 85 pub doc: Option<String>,
32} 86}
33 87
34impl FnDescriptor { 88impl FnSignatureInfo {
35 pub fn new(node: ast::FnDef) -> Option<Self> { 89 fn new(node: ast::FnDef) -> Option<Self> {
36 let name = node.name()?.text().to_string(); 90 let name = node.name()?.text().to_string();
37 91
38 let mut doc = None; 92 let mut doc = None;
@@ -51,7 +105,7 @@ impl FnDescriptor {
51 node.syntax().text().to_string() 105 node.syntax().text().to_string()
52 }; 106 };
53 107
54 if let Some((comment_range, docs)) = FnDescriptor::extract_doc_comments(node) { 108 if let Some((comment_range, docs)) = FnSignatureInfo::extract_doc_comments(node) {
55 let comment_range = comment_range 109 let comment_range = comment_range
56 .checked_sub(node.syntax().range().start()) 110 .checked_sub(node.syntax().range().start())
57 .unwrap(); 111 .unwrap();
@@ -83,10 +137,10 @@ impl FnDescriptor {
83 } 137 }
84 } 138 }
85 139
86 let params = FnDescriptor::param_list(node); 140 let params = FnSignatureInfo::param_list(node);
87 let ret_type = node.ret_type().map(|r| r.syntax().text().to_string()); 141 let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());
88 142
89 Some(FnDescriptor { 143 Some(FnSignatureInfo {
90 name, 144 name,
91 ret_type, 145 ret_type,
92 params, 146 params,
diff --git a/crates/ra_analysis/src/descriptors/function/scope.rs b/crates/ra_hir/src/function/scope.rs
index bbe16947c..863453291 100644
--- a/crates/ra_analysis/src/descriptors/function/scope.rs
+++ b/crates/ra_hir/src/function/scope.rs
@@ -1,20 +1,22 @@
1use rustc_hash::{FxHashMap, FxHashSet}; 1use rustc_hash::{FxHashMap, FxHashSet};
2 2
3use ra_syntax::{ 3use ra_syntax::{
4 AstNode, SmolStr, SyntaxNodeRef, TextRange,
4 algo::generate, 5 algo::generate,
5 ast::{self, ArgListOwner, LoopBodyOwner, NameOwner}, 6 ast::{self, ArgListOwner, LoopBodyOwner, NameOwner},
6 AstNode, SmolStr, SyntaxNodeRef,
7}; 7};
8use ra_db::LocalSyntaxPtr;
8 9
9use crate::syntax_ptr::LocalSyntaxPtr; 10use crate::{
11 arena::{Arena, Id},
12};
10 13
11#[derive(Clone, Copy, PartialEq, Eq, Debug)] 14pub(crate) type ScopeId = Id<ScopeData>;
12pub(crate) struct ScopeId(u32);
13 15
14#[derive(Debug, PartialEq, Eq)] 16#[derive(Debug, PartialEq, Eq)]
15pub struct FnScopes { 17pub struct FnScopes {
16 pub(crate) self_param: Option<LocalSyntaxPtr>, 18 pub self_param: Option<LocalSyntaxPtr>,
17 scopes: Vec<ScopeData>, 19 scopes: Arena<ScopeData>,
18 scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>, 20 scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>,
19} 21}
20 22
@@ -25,19 +27,19 @@ pub struct ScopeEntry {
25} 27}
26 28
27#[derive(Debug, PartialEq, Eq)] 29#[derive(Debug, PartialEq, Eq)]
28struct ScopeData { 30pub struct ScopeData {
29 parent: Option<ScopeId>, 31 parent: Option<ScopeId>,
30 entries: Vec<ScopeEntry>, 32 entries: Vec<ScopeEntry>,
31} 33}
32 34
33impl FnScopes { 35impl FnScopes {
34 pub(crate) fn new(fn_def: ast::FnDef) -> FnScopes { 36 pub fn new(fn_def: ast::FnDef) -> FnScopes {
35 let mut scopes = FnScopes { 37 let mut scopes = FnScopes {
36 self_param: fn_def 38 self_param: fn_def
37 .param_list() 39 .param_list()
38 .and_then(|it| it.self_param()) 40 .and_then(|it| it.self_param())
39 .map(|it| LocalSyntaxPtr::new(it.syntax())), 41 .map(|it| LocalSyntaxPtr::new(it.syntax())),
40 scopes: Vec::new(), 42 scopes: Arena::default(),
41 scope_for: FxHashMap::default(), 43 scope_for: FxHashMap::default(),
42 }; 44 };
43 let root = scopes.root_scope(); 45 let root = scopes.root_scope();
@@ -47,27 +49,56 @@ impl FnScopes {
47 } 49 }
48 scopes 50 scopes
49 } 51 }
50 pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { 52 pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
51 &self.get(scope).entries 53 &self.scopes[scope].entries
52 } 54 }
53 pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a { 55 pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a {
54 generate(self.scope_for(node), move |&scope| self.get(scope).parent) 56 generate(self.scope_for(node), move |&scope| {
57 self.scopes[scope].parent
58 })
59 }
60 pub fn resolve_local_name<'a>(&'a self, name_ref: ast::NameRef) -> Option<&'a ScopeEntry> {
61 let mut shadowed = FxHashSet::default();
62 let ret = self
63 .scope_chain(name_ref.syntax())
64 .flat_map(|scope| self.entries(scope).iter())
65 .filter(|entry| shadowed.insert(entry.name()))
66 .filter(|entry| entry.name() == &name_ref.text())
67 .nth(0);
68 ret
69 }
70
71 pub fn find_all_refs(&self, pat: ast::BindPat) -> Vec<ReferenceDescriptor> {
72 let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
73 let name_ptr = LocalSyntaxPtr::new(pat.syntax());
74 let refs: Vec<_> = fn_def
75 .syntax()
76 .descendants()
77 .filter_map(ast::NameRef::cast)
78 .filter(|name_ref| match self.resolve_local_name(*name_ref) {
79 None => false,
80 Some(entry) => entry.ptr() == name_ptr,
81 })
82 .map(|name_ref| ReferenceDescriptor {
83 name: name_ref.syntax().text().to_string(),
84 range: name_ref.syntax().range(),
85 })
86 .collect();
87
88 refs
55 } 89 }
90
56 fn root_scope(&mut self) -> ScopeId { 91 fn root_scope(&mut self) -> ScopeId {
57 let res = ScopeId(self.scopes.len() as u32); 92 self.scopes.alloc(ScopeData {
58 self.scopes.push(ScopeData {
59 parent: None, 93 parent: None,
60 entries: vec![], 94 entries: vec![],
61 }); 95 })
62 res
63 } 96 }
64 fn new_scope(&mut self, parent: ScopeId) -> ScopeId { 97 fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
65 let res = ScopeId(self.scopes.len() as u32); 98 self.scopes.alloc(ScopeData {
66 self.scopes.push(ScopeData {
67 parent: Some(parent), 99 parent: Some(parent),
68 entries: vec![], 100 entries: vec![],
69 }); 101 })
70 res
71 } 102 }
72 fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) { 103 fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) {
73 let entries = pat 104 let entries = pat
@@ -75,7 +106,7 @@ impl FnScopes {
75 .descendants() 106 .descendants()
76 .filter_map(ast::BindPat::cast) 107 .filter_map(ast::BindPat::cast)
77 .filter_map(ScopeEntry::new); 108 .filter_map(ScopeEntry::new);
78 self.get_mut(scope).entries.extend(entries); 109 self.scopes[scope].entries.extend(entries);
79 } 110 }
80 fn add_params_bindings(&mut self, scope: ScopeId, params: Option<ast::ParamList>) { 111 fn add_params_bindings(&mut self, scope: ScopeId, params: Option<ast::ParamList>) {
81 params 112 params
@@ -93,12 +124,6 @@ impl FnScopes {
93 .filter_map(|it| self.scope_for.get(&it).map(|&scope| scope)) 124 .filter_map(|it| self.scope_for.get(&it).map(|&scope| scope))
94 .next() 125 .next()
95 } 126 }
96 fn get(&self, scope: ScopeId) -> &ScopeData {
97 &self.scopes[scope.0 as usize]
98 }
99 fn get_mut(&mut self, scope: ScopeId) -> &mut ScopeData {
100 &mut self.scopes[scope.0 as usize]
101 }
102} 127}
103 128
104impl ScopeEntry { 129impl ScopeEntry {
@@ -110,10 +135,10 @@ impl ScopeEntry {
110 }; 135 };
111 Some(res) 136 Some(res)
112 } 137 }
113 pub(crate) fn name(&self) -> &SmolStr { 138 pub fn name(&self) -> &SmolStr {
114 &self.name 139 &self.name
115 } 140 }
116 pub(crate) fn ptr(&self) -> LocalSyntaxPtr { 141 pub fn ptr(&self) -> LocalSyntaxPtr {
117 self.ptr 142 self.ptr
118 } 143 }
119} 144}
@@ -255,18 +280,10 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
255 } 280 }
256} 281}
257 282
258pub fn resolve_local_name<'a>( 283#[derive(Debug)]
259 name_ref: ast::NameRef, 284pub struct ReferenceDescriptor {
260 scopes: &'a FnScopes, 285 pub range: TextRange,
261) -> Option<&'a ScopeEntry> { 286 pub name: String,
262 let mut shadowed = FxHashSet::default();
263 let ret = scopes
264 .scope_chain(name_ref.syntax())
265 .flat_map(|scope| scopes.entries(scope).iter())
266 .filter(|entry| shadowed.insert(entry.name()))
267 .filter(|entry| entry.name() == &name_ref.text())
268 .nth(0);
269 ret
270} 287}
271 288
272#[cfg(test)] 289#[cfg(test)]
@@ -382,7 +399,7 @@ mod tests {
382 399
383 let scopes = FnScopes::new(fn_def); 400 let scopes = FnScopes::new(fn_def);
384 401
385 let local_name_entry = resolve_local_name(name_ref, &scopes).unwrap(); 402 let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
386 let local_name = local_name_entry.ptr().resolve(&file); 403 let local_name = local_name_entry.ptr().resolve(&file);
387 let expected_name = 404 let expected_name =
388 find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap(); 405 find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
new file mode 100644
index 000000000..e7b6a81f4
--- /dev/null
+++ b/crates/ra_hir/src/lib.rs
@@ -0,0 +1,141 @@
1//! HIR (previously known as descriptors) provides a high-level OO access to Rust
2//! code.
3//!
4//! The principal difference between HIR and syntax trees is that HIR is bound
5//! to a particular crate instance. That is, it has cfg flags and features
6//! applied. So, the relation between syntax and HIR is many-to-one.
7
8macro_rules! ctry {
9 ($expr:expr) => {
10 match $expr {
11 None => return Ok(None),
12 Some(it) => it,
13 }
14 };
15}
16
17pub mod db;
18#[cfg(test)]
19mod mock;
20mod query_definitions;
21mod function;
22mod module;
23mod path;
24mod arena;
25
26use std::ops::Index;
27
28use ra_syntax::{SyntaxNodeRef, SyntaxNode};
29use ra_db::{LocationIntener, SourceRootId, FileId, Cancelable};
30
31use crate::{
32 db::HirDatabase,
33 arena::{Arena, Id},
34};
35
36pub use self::{
37 path::{Path, PathKind},
38 module::{Module, ModuleId, Problem, nameres::ItemMap},
39 function::{Function, FnScopes},
40};
41
42pub use self::function::FnSignatureInfo;
43
44#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
45pub struct FnId(u32);
46ra_db::impl_numeric_id!(FnId);
47
48impl FnId {
49 pub fn from_loc(
50 db: &impl AsRef<LocationIntener<SourceItemId, FnId>>,
51 loc: &SourceItemId,
52 ) -> FnId {
53 db.as_ref().loc2id(loc)
54 }
55 pub fn loc(self, db: &impl AsRef<LocationIntener<SourceItemId, FnId>>) -> SourceItemId {
56 db.as_ref().id2loc(self)
57 }
58}
59
60#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
61pub struct DefId(u32);
62ra_db::impl_numeric_id!(DefId);
63
64#[derive(Clone, Debug, PartialEq, Eq, Hash)]
65pub enum DefLoc {
66 Module {
67 id: ModuleId,
68 source_root: SourceRootId,
69 },
70 Item {
71 source_item_id: SourceItemId,
72 },
73}
74
75impl DefId {
76 pub fn loc(self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefLoc {
77 db.as_ref().id2loc(self)
78 }
79}
80
81impl DefLoc {
82 pub fn id(&self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefId {
83 db.as_ref().loc2id(&self)
84 }
85}
86
87pub enum Def {
88 Module(Module),
89 Item,
90}
91
92impl DefId {
93 pub fn resolve(self, db: &impl HirDatabase) -> Cancelable<Def> {
94 let loc = self.loc(db);
95 let res = match loc {
96 DefLoc::Module { id, source_root } => {
97 let descr = Module::new(db, source_root, id)?;
98 Def::Module(descr)
99 }
100 DefLoc::Item { .. } => Def::Item,
101 };
102 Ok(res)
103 }
104}
105
106/// Identifier of item within a specific file. This is stable over reparses, so
107/// it's OK to use it as a salsa key/value.
108pub(crate) type SourceFileItemId = Id<SyntaxNode>;
109
110#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
111pub struct SourceItemId {
112 file_id: FileId,
113 item_id: SourceFileItemId,
114}
115
116/// Maps item's `SyntaxNode`s to `SourceFileItemId` and back.
117#[derive(Debug, PartialEq, Eq, Default)]
118pub struct SourceFileItems {
119 arena: Arena<SyntaxNode>,
120}
121
122impl SourceFileItems {
123 fn alloc(&mut self, item: SyntaxNode) -> SourceFileItemId {
124 self.arena.alloc(item)
125 }
126 pub fn id_of(&self, item: SyntaxNodeRef) -> SourceFileItemId {
127 let (id, _item) = self
128 .arena
129 .iter()
130 .find(|(_id, i)| i.borrowed() == item)
131 .unwrap();
132 id
133 }
134}
135
136impl Index<SourceFileItemId> for SourceFileItems {
137 type Output = SyntaxNode;
138 fn index(&self, idx: SourceFileItemId) -> &SyntaxNode {
139 &self.arena[idx]
140 }
141}
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
new file mode 100644
index 000000000..8e256b89f
--- /dev/null
+++ b/crates/ra_hir/src/mock.rs
@@ -0,0 +1,172 @@
1use std::sync::Arc;
2
3use parking_lot::Mutex;
4use salsa::{self, Database};
5use ra_db::{LocationIntener, BaseDatabase, FilePosition, mock::FileMap, FileId, WORKSPACE};
6use relative_path::RelativePathBuf;
7use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset};
8
9use crate::{db, DefId, DefLoc, FnId, SourceItemId};
10
11#[derive(Debug)]
12pub(crate) struct MockDatabase {
13 events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>,
14 runtime: salsa::Runtime<MockDatabase>,
15 id_maps: Arc<IdMaps>,
16}
17
18impl MockDatabase {
19 pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) {
20 let mut db = MockDatabase::default();
21
22 let mut position = None;
23 let mut file_map = FileMap::default();
24 for entry in parse_fixture(fixture) {
25 if entry.text.contains(CURSOR_MARKER) {
26 assert!(
27 position.is_none(),
28 "only one marker (<|>) per fixture is allowed"
29 );
30 position = Some(db.add_file_with_position(&mut file_map, &entry.meta, &entry.text));
31 } else {
32 db.add_file(&mut file_map, &entry.meta, &entry.text);
33 }
34 }
35 let position = position.expect("expected a marker (<|>)");
36 let source_root = file_map.into_source_root();
37 db.query_mut(ra_db::SourceRootQuery)
38 .set(WORKSPACE, Arc::new(source_root));
39 (db, position)
40 }
41
42 fn add_file(&mut self, file_map: &mut FileMap, path: &str, text: &str) -> FileId {
43 assert!(path.starts_with('/'));
44 let path = RelativePathBuf::from_path(&path[1..]).unwrap();
45
46 let file_id = file_map.add(path);
47 let text = Arc::new(text.to_string());
48 self.query_mut(ra_db::FileTextQuery).set(file_id, text);
49 self.query_mut(ra_db::FileSourceRootQuery)
50 .set(file_id, WORKSPACE);
51 file_id
52 }
53
54 fn add_file_with_position(
55 &mut self,
56 file_map: &mut FileMap,
57 path: &str,
58 text: &str,
59 ) -> FilePosition {
60 let (offset, text) = extract_offset(text);
61 let file_id = self.add_file(file_map, path, &text);
62 FilePosition { file_id, offset }
63 }
64}
65
66#[derive(Debug, Default)]
67struct IdMaps {
68 fns: LocationIntener<SourceItemId, FnId>,
69 defs: LocationIntener<DefLoc, DefId>,
70}
71
72impl salsa::Database for MockDatabase {
73 fn salsa_runtime(&self) -> &salsa::Runtime<MockDatabase> {
74 &self.runtime
75 }
76
77 fn salsa_event(&self, event: impl Fn() -> salsa::Event<MockDatabase>) {
78 let mut events = self.events.lock();
79 if let Some(events) = &mut *events {
80 events.push(event());
81 }
82 }
83}
84
85impl Default for MockDatabase {
86 fn default() -> MockDatabase {
87 let mut db = MockDatabase {
88 events: Default::default(),
89 runtime: salsa::Runtime::default(),
90 id_maps: Default::default(),
91 };
92 db.query_mut(ra_db::SourceRootQuery)
93 .set(ra_db::WORKSPACE, Default::default());
94 db.query_mut(ra_db::CrateGraphQuery)
95 .set((), Default::default());
96 db.query_mut(ra_db::LibrariesQuery)
97 .set((), Default::default());
98 db
99 }
100}
101
102impl salsa::ParallelDatabase for MockDatabase {
103 fn snapshot(&self) -> salsa::Snapshot<MockDatabase> {
104 salsa::Snapshot::new(MockDatabase {
105 events: Default::default(),
106 runtime: self.runtime.snapshot(self),
107 id_maps: self.id_maps.clone(),
108 })
109 }
110}
111
112impl BaseDatabase for MockDatabase {}
113
114impl AsRef<LocationIntener<DefLoc, DefId>> for MockDatabase {
115 fn as_ref(&self) -> &LocationIntener<DefLoc, DefId> {
116 &self.id_maps.defs
117 }
118}
119
120impl AsRef<LocationIntener<SourceItemId, FnId>> for MockDatabase {
121 fn as_ref(&self) -> &LocationIntener<SourceItemId, FnId> {
122 &self.id_maps.fns
123 }
124}
125
126impl MockDatabase {
127 pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event<MockDatabase>> {
128 *self.events.lock() = Some(Vec::new());
129 f();
130 let events = self.events.lock().take().unwrap();
131 events
132 }
133
134 pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
135 let events = self.log(f);
136 events
137 .into_iter()
138 .filter_map(|e| match e.kind {
139 // This pretty horrible, but `Debug` is the only way to inspect
140 // QueryDescriptor at the moment.
141 salsa::EventKind::WillExecute { descriptor } => Some(format!("{:?}", descriptor)),
142 _ => None,
143 })
144 .collect()
145 }
146}
147
148salsa::database_storage! {
149 pub(crate) struct MockDatabaseStorage for MockDatabase {
150 impl ra_db::FilesDatabase {
151 fn file_text() for ra_db::FileTextQuery;
152 fn file_source_root() for ra_db::FileSourceRootQuery;
153 fn source_root() for ra_db::SourceRootQuery;
154 fn libraries() for ra_db::LibrariesQuery;
155 fn crate_graph() for ra_db::CrateGraphQuery;
156 }
157 impl ra_db::SyntaxDatabase {
158 fn source_file() for ra_db::SourceFileQuery;
159 fn file_lines() for ra_db::FileLinesQuery;
160 }
161 impl db::HirDatabase {
162 fn module_tree() for db::ModuleTreeQuery;
163 fn fn_scopes() for db::FnScopesQuery;
164 fn file_items() for db::SourceFileItemsQuery;
165 fn file_item() for db::FileItemQuery;
166 fn input_module_items() for db::InputModuleItemsQuery;
167 fn item_map() for db::ItemMapQuery;
168 fn fn_syntax() for db::FnSyntaxQuery;
169 fn submodules() for db::SubmodulesQuery;
170 }
171 }
172}
diff --git a/crates/ra_analysis/src/descriptors/module/imp.rs b/crates/ra_hir/src/module/imp.rs
index ade96ddc0..76ea129a7 100644
--- a/crates/ra_analysis/src/descriptors/module/imp.rs
+++ b/crates/ra_hir/src/module/imp.rs
@@ -1,26 +1,24 @@
1use std::sync::Arc; 1use std::sync::Arc;
2 2
3use ra_syntax::{ 3use ra_syntax::{
4 ast::{self, ModuleItemOwner, NameOwner}, 4 ast::{self, NameOwner},
5 SmolStr, 5 SmolStr,
6}; 6};
7use relative_path::RelativePathBuf; 7use relative_path::RelativePathBuf;
8use rustc_hash::{FxHashMap, FxHashSet}; 8use rustc_hash::{FxHashMap, FxHashSet};
9use ra_db::{SourceRoot, SourceRootId, FileResolverImp, Cancelable, FileId,};
9 10
10use crate::{ 11use crate::{
11 db, 12 HirDatabase,
12 descriptors::DescriptorDatabase,
13 input::{SourceRoot, SourceRootId},
14 Cancelable, FileId, FileResolverImp,
15}; 13};
16 14
17use super::{ 15use super::{
18 LinkData, LinkId, ModuleData, ModuleId, ModuleScope, ModuleSource, ModuleSourceNode, 16 LinkData, LinkId, ModuleData, ModuleId, ModuleSource,
19 ModuleTree, Problem, 17 ModuleTree, Problem,
20}; 18};
21 19
22#[derive(Clone, Hash, PartialEq, Eq, Debug)] 20#[derive(Clone, Hash, PartialEq, Eq, Debug)]
23pub(crate) enum Submodule { 21pub enum Submodule {
24 Declaration(SmolStr), 22 Declaration(SmolStr),
25 Definition(SmolStr, ModuleSource), 23 Definition(SmolStr, ModuleSource),
26} 24}
@@ -34,39 +32,6 @@ impl Submodule {
34 } 32 }
35} 33}
36 34
37pub(crate) fn submodules(
38 db: &impl DescriptorDatabase,
39 source: ModuleSource,
40) -> Cancelable<Arc<Vec<Submodule>>> {
41 db::check_canceled(db)?;
42 let file_id = source.file_id();
43 let submodules = match source.resolve(db) {
44 ModuleSourceNode::SourceFile(it) => collect_submodules(file_id, it.borrowed()),
45 ModuleSourceNode::Module(it) => it
46 .borrowed()
47 .item_list()
48 .map(|it| collect_submodules(file_id, it))
49 .unwrap_or_else(Vec::new),
50 };
51 return Ok(Arc::new(submodules));
52
53 fn collect_submodules<'a>(
54 file_id: FileId,
55 root: impl ast::ModuleItemOwner<'a>,
56 ) -> Vec<Submodule> {
57 modules(root)
58 .map(|(name, m)| {
59 if m.has_semi() {
60 Submodule::Declaration(name)
61 } else {
62 let src = ModuleSource::new_inline(file_id, m);
63 Submodule::Definition(name, src)
64 }
65 })
66 .collect()
67 }
68}
69
70pub(crate) fn modules<'a>( 35pub(crate) fn modules<'a>(
71 root: impl ast::ModuleItemOwner<'a>, 36 root: impl ast::ModuleItemOwner<'a>,
72) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> { 37) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
@@ -81,40 +46,20 @@ pub(crate) fn modules<'a>(
81 }) 46 })
82} 47}
83 48
84pub(crate) fn module_scope(
85 db: &impl DescriptorDatabase,
86 source_root_id: SourceRootId,
87 module_id: ModuleId,
88) -> Cancelable<Arc<ModuleScope>> {
89 let tree = db.module_tree(source_root_id)?;
90 let source = module_id.source(&tree).resolve(db);
91 let res = match source {
92 ModuleSourceNode::SourceFile(it) => ModuleScope::new(it.borrowed().items()),
93 ModuleSourceNode::Module(it) => match it.borrowed().item_list() {
94 Some(items) => ModuleScope::new(items.items()),
95 None => ModuleScope::new(std::iter::empty()),
96 },
97 };
98 Ok(Arc::new(res))
99}
100
101pub(crate) fn module_tree( 49pub(crate) fn module_tree(
102 db: &impl DescriptorDatabase, 50 db: &impl HirDatabase,
103 source_root: SourceRootId, 51 source_root: SourceRootId,
104) -> Cancelable<Arc<ModuleTree>> { 52) -> Cancelable<Arc<ModuleTree>> {
105 db::check_canceled(db)?; 53 db.check_canceled()?;
106 let res = create_module_tree(db, source_root)?; 54 let res = create_module_tree(db, source_root)?;
107 Ok(Arc::new(res)) 55 Ok(Arc::new(res))
108} 56}
109 57
110fn create_module_tree<'a>( 58fn create_module_tree<'a>(
111 db: &impl DescriptorDatabase, 59 db: &impl HirDatabase,
112 source_root: SourceRootId, 60 source_root: SourceRootId,
113) -> Cancelable<ModuleTree> { 61) -> Cancelable<ModuleTree> {
114 let mut tree = ModuleTree { 62 let mut tree = ModuleTree::default();
115 mods: Vec::new(),
116 links: Vec::new(),
117 };
118 63
119 let mut roots = FxHashMap::default(); 64 let mut roots = FxHashMap::default();
120 let mut visited = FxHashSet::default(); 65 let mut visited = FxHashSet::default();
@@ -141,7 +86,7 @@ fn create_module_tree<'a>(
141} 86}
142 87
143fn build_subtree( 88fn build_subtree(
144 db: &impl DescriptorDatabase, 89 db: &impl HirDatabase,
145 source_root: &SourceRoot, 90 source_root: &SourceRoot,
146 tree: &mut ModuleTree, 91 tree: &mut ModuleTree,
147 visited: &mut FxHashSet<ModuleSource>, 92 visited: &mut FxHashSet<ModuleSource>,
@@ -171,7 +116,7 @@ fn build_subtree(
171 .into_iter() 116 .into_iter()
172 .map(|file_id| match roots.remove(&file_id) { 117 .map(|file_id| match roots.remove(&file_id) {
173 Some(module_id) => { 118 Some(module_id) => {
174 tree.module_mut(module_id).parent = Some(link); 119 tree.mods[module_id].parent = Some(link);
175 Ok(module_id) 120 Ok(module_id)
176 } 121 }
177 None => build_subtree( 122 None => build_subtree(
@@ -201,8 +146,8 @@ fn build_subtree(
201 } 146 }
202 }; 147 };
203 148
204 tree.link_mut(link).points_to = points_to; 149 tree.links[link].points_to = points_to;
205 tree.link_mut(link).problem = problem; 150 tree.links[link].problem = problem;
206 } 151 }
207 Ok(id) 152 Ok(id)
208} 153}
diff --git a/crates/ra_hir/src/module/mod.rs b/crates/ra_hir/src/module/mod.rs
new file mode 100644
index 000000000..3ae83d8cb
--- /dev/null
+++ b/crates/ra_hir/src/module/mod.rs
@@ -0,0 +1,373 @@
1pub(super) mod imp;
2pub(super) mod nameres;
3
4use std::sync::Arc;
5
6use ra_editor::find_node_at_offset;
7
8use ra_syntax::{
9 algo::generate,
10 ast::{self, AstNode, NameOwner},
11 SmolStr, SyntaxNode,
12};
13use ra_db::{SourceRootId, FileId, FilePosition, Cancelable};
14use relative_path::RelativePathBuf;
15
16use crate::{
17 DefLoc, DefId, Path, PathKind, HirDatabase, SourceItemId,
18 arena::{Arena, Id},
19};
20
21pub use self::nameres::ModuleScope;
22
23/// `Module` is API entry point to get all the information
24/// about a particular module.
25#[derive(Debug, Clone)]
26pub struct Module {
27 tree: Arc<ModuleTree>,
28 source_root_id: SourceRootId,
29 module_id: ModuleId,
30}
31
32impl Module {
33 /// Lookup `Module` by `FileId`. Note that this is inherently
34 /// lossy transformation: in general, a single source might correspond to
35 /// several modules.
36 pub fn guess_from_file_id(
37 db: &impl HirDatabase,
38 file_id: FileId,
39 ) -> Cancelable<Option<Module>> {
40 Module::guess_from_source(db, file_id, ModuleSource::SourceFile(file_id))
41 }
42
43 /// Lookup `Module` by position in the source code. Note that this
44 /// is inherently lossy transformation: in general, a single source might
45 /// correspond to several modules.
46 pub fn guess_from_position(
47 db: &impl HirDatabase,
48 position: FilePosition,
49 ) -> Cancelable<Option<Module>> {
50 let file = db.source_file(position.file_id);
51 let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset)
52 {
53 Some(m) if !m.has_semi() => ModuleSource::new_inline(db, position.file_id, m),
54 _ => ModuleSource::SourceFile(position.file_id),
55 };
56 Module::guess_from_source(db, position.file_id, module_source)
57 }
58
59 fn guess_from_source(
60 db: &impl HirDatabase,
61 file_id: FileId,
62 module_source: ModuleSource,
63 ) -> Cancelable<Option<Module>> {
64 let source_root_id = db.file_source_root(file_id);
65 let module_tree = db.module_tree(source_root_id)?;
66
67 let res = match module_tree.any_module_for_source(module_source) {
68 None => None,
69 Some(module_id) => Some(Module {
70 tree: module_tree,
71 source_root_id,
72 module_id,
73 }),
74 };
75 Ok(res)
76 }
77
78 pub(super) fn new(
79 db: &impl HirDatabase,
80 source_root_id: SourceRootId,
81 module_id: ModuleId,
82 ) -> Cancelable<Module> {
83 let module_tree = db.module_tree(source_root_id)?;
84 let res = Module {
85 tree: module_tree,
86 source_root_id,
87 module_id,
88 };
89 Ok(res)
90 }
91
92 /// Returns `mod foo;` or `mod foo {}` node which declared this module.
93 /// Returns `None` for the root module
94 pub fn parent_link_source(&self, db: &impl HirDatabase) -> Option<(FileId, ast::ModuleNode)> {
95 let link = self.module_id.parent_link(&self.tree)?;
96 let file_id = link.owner(&self.tree).source(&self.tree).file_id();
97 let src = link.bind_source(&self.tree, db);
98 Some((file_id, src))
99 }
100
101 pub fn source(&self) -> ModuleSource {
102 self.module_id.source(&self.tree)
103 }
104
105 /// Parent module. Returns `None` if this is a root module.
106 pub fn parent(&self) -> Option<Module> {
107 let parent_id = self.module_id.parent(&self.tree)?;
108 Some(Module {
109 module_id: parent_id,
110 ..self.clone()
111 })
112 }
113
114 /// The root of the tree this module is part of
115 pub fn crate_root(&self) -> Module {
116 let root_id = self.module_id.crate_root(&self.tree);
117 Module {
118 module_id: root_id,
119 ..self.clone()
120 }
121 }
122
123 /// `name` is `None` for the crate's root module
124 pub fn name(&self) -> Option<SmolStr> {
125 let link = self.module_id.parent_link(&self.tree)?;
126 Some(link.name(&self.tree))
127 }
128
129 pub fn def_id(&self, db: &impl HirDatabase) -> DefId {
130 let def_loc = DefLoc::Module {
131 id: self.module_id,
132 source_root: self.source_root_id,
133 };
134 def_loc.id(db)
135 }
136
137 /// Finds a child module with the specified name.
138 pub fn child(&self, name: &str) -> Option<Module> {
139 let child_id = self.module_id.child(&self.tree, name)?;
140 Some(Module {
141 module_id: child_id,
142 ..self.clone()
143 })
144 }
145
146 /// Returns a `ModuleScope`: a set of items, visible in this module.
147 pub fn scope(&self, db: &impl HirDatabase) -> Cancelable<ModuleScope> {
148 let item_map = db.item_map(self.source_root_id)?;
149 let res = item_map.per_module[&self.module_id].clone();
150 Ok(res)
151 }
152
153 pub fn resolve_path(&self, db: &impl HirDatabase, path: Path) -> Cancelable<Option<DefId>> {
154 let mut curr = match path.kind {
155 PathKind::Crate => self.crate_root(),
156 PathKind::Self_ | PathKind::Plain => self.clone(),
157 PathKind::Super => ctry!(self.parent()),
158 }
159 .def_id(db);
160
161 let segments = path.segments;
162 for name in segments.iter() {
163 let module = match curr.loc(db) {
164 DefLoc::Module { id, source_root } => Module::new(db, source_root, id)?,
165 _ => return Ok(None),
166 };
167 let scope = module.scope(db)?;
168 curr = ctry!(ctry!(scope.get(&name)).def_id);
169 }
170 Ok(Some(curr))
171 }
172
173 pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
174 self.module_id.problems(&self.tree, db)
175 }
176}
177
178/// Physically, rust source is organized as a set of files, but logically it is
179/// organized as a tree of modules. Usually, a single file corresponds to a
180/// single module, but it is not necessarily the case.
181///
182/// Module encapsulates the logic of transitioning from the fuzzy world of files
183/// (which can have multiple parents) to the precise world of modules (which
184/// always have one parent).
185#[derive(Default, Debug, PartialEq, Eq)]
186pub struct ModuleTree {
187 mods: Arena<ModuleData>,
188 links: Arena<LinkData>,
189}
190
191impl ModuleTree {
192 pub(crate) fn modules<'a>(&'a self) -> impl Iterator<Item = ModuleId> + 'a {
193 self.mods.iter().map(|(id, _)| id)
194 }
195
196 fn modules_for_source(&self, source: ModuleSource) -> Vec<ModuleId> {
197 self.mods
198 .iter()
199 .filter(|(_idx, it)| it.source == source)
200 .map(|(idx, _)| idx)
201 .collect()
202 }
203
204 fn any_module_for_source(&self, source: ModuleSource) -> Option<ModuleId> {
205 self.modules_for_source(source).pop()
206 }
207}
208
209/// `ModuleSource` is the syntax tree element that produced this module:
210/// either a file, or an inline module.
211#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
212pub enum ModuleSource {
213 SourceFile(FileId),
214 Module(SourceItemId),
215}
216
217/// An owned syntax node for a module. Unlike `ModuleSource`,
218/// this holds onto the AST for the whole file.
219pub(crate) enum ModuleSourceNode {
220 SourceFile(ast::SourceFileNode),
221 Module(ast::ModuleNode),
222}
223
224pub type ModuleId = Id<ModuleData>;
225type LinkId = Id<LinkData>;
226
227#[derive(Clone, Debug, Hash, PartialEq, Eq)]
228pub enum Problem {
229 UnresolvedModule {
230 candidate: RelativePathBuf,
231 },
232 NotDirOwner {
233 move_to: RelativePathBuf,
234 candidate: RelativePathBuf,
235 },
236}
237
238impl ModuleId {
239 pub(crate) fn source(self, tree: &ModuleTree) -> ModuleSource {
240 tree.mods[self].source
241 }
242 fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> {
243 tree.mods[self].parent
244 }
245 fn parent(self, tree: &ModuleTree) -> Option<ModuleId> {
246 let link = self.parent_link(tree)?;
247 Some(tree.links[link].owner)
248 }
249 fn crate_root(self, tree: &ModuleTree) -> ModuleId {
250 generate(Some(self), move |it| it.parent(tree))
251 .last()
252 .unwrap()
253 }
254 fn child(self, tree: &ModuleTree, name: &str) -> Option<ModuleId> {
255 let link = tree.mods[self]
256 .children
257 .iter()
258 .map(|&it| &tree.links[it])
259 .find(|it| it.name == name)?;
260 Some(*link.points_to.first()?)
261 }
262 fn children<'a>(self, tree: &'a ModuleTree) -> impl Iterator<Item = (SmolStr, ModuleId)> + 'a {
263 tree.mods[self].children.iter().filter_map(move |&it| {
264 let link = &tree.links[it];
265 let module = *link.points_to.first()?;
266 Some((link.name.clone(), module))
267 })
268 }
269 fn problems(self, tree: &ModuleTree, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
270 tree.mods[self]
271 .children
272 .iter()
273 .filter_map(|&it| {
274 let p = tree.links[it].problem.clone()?;
275 let s = it.bind_source(tree, db);
276 let s = s.borrowed().name().unwrap().syntax().owned();
277 Some((s, p))
278 })
279 .collect()
280 }
281}
282
283impl LinkId {
284 fn owner(self, tree: &ModuleTree) -> ModuleId {
285 tree.links[self].owner
286 }
287 fn name(self, tree: &ModuleTree) -> SmolStr {
288 tree.links[self].name.clone()
289 }
290 fn bind_source<'a>(self, tree: &ModuleTree, db: &impl HirDatabase) -> ast::ModuleNode {
291 let owner = self.owner(tree);
292 match owner.source(tree).resolve(db) {
293 ModuleSourceNode::SourceFile(root) => {
294 let ast = imp::modules(root.borrowed())
295 .find(|(name, _)| name == &tree.links[self].name)
296 .unwrap()
297 .1;
298 ast.owned()
299 }
300 ModuleSourceNode::Module(it) => it,
301 }
302 }
303}
304
305#[derive(Debug, PartialEq, Eq, Hash)]
306pub struct ModuleData {
307 source: ModuleSource,
308 parent: Option<LinkId>,
309 children: Vec<LinkId>,
310}
311
312impl ModuleSource {
313 pub(crate) fn new_inline(
314 db: &impl HirDatabase,
315 file_id: FileId,
316 module: ast::Module,
317 ) -> ModuleSource {
318 assert!(!module.has_semi());
319 let items = db.file_items(file_id);
320 let item_id = items.id_of(module.syntax());
321 let id = SourceItemId { file_id, item_id };
322 ModuleSource::Module(id)
323 }
324
325 pub fn as_file(self) -> Option<FileId> {
326 match self {
327 ModuleSource::SourceFile(f) => Some(f),
328 ModuleSource::Module(..) => None,
329 }
330 }
331
332 pub fn file_id(self) -> FileId {
333 match self {
334 ModuleSource::SourceFile(f) => f,
335 ModuleSource::Module(source_item_id) => source_item_id.file_id,
336 }
337 }
338
339 pub(crate) fn resolve(self, db: &impl HirDatabase) -> ModuleSourceNode {
340 match self {
341 ModuleSource::SourceFile(file_id) => {
342 let syntax = db.source_file(file_id);
343 ModuleSourceNode::SourceFile(syntax.ast().owned())
344 }
345 ModuleSource::Module(item_id) => {
346 let syntax = db.file_item(item_id);
347 let syntax = syntax.borrowed();
348 let module = ast::Module::cast(syntax).unwrap();
349 ModuleSourceNode::Module(module.owned())
350 }
351 }
352 }
353}
354
355#[derive(Hash, Debug, PartialEq, Eq)]
356struct LinkData {
357 owner: ModuleId,
358 name: SmolStr,
359 points_to: Vec<ModuleId>,
360 problem: Option<Problem>,
361}
362
363impl ModuleTree {
364 fn push_mod(&mut self, data: ModuleData) -> ModuleId {
365 self.mods.alloc(data)
366 }
367 fn push_link(&mut self, data: LinkData) -> LinkId {
368 let owner = data.owner;
369 let id = self.links.alloc(data);
370 self.mods[owner].children.push(id);
371 id
372 }
373}
diff --git a/crates/ra_hir/src/module/nameres.rs b/crates/ra_hir/src/module/nameres.rs
new file mode 100644
index 000000000..8529e16b3
--- /dev/null
+++ b/crates/ra_hir/src/module/nameres.rs
@@ -0,0 +1,434 @@
1//! Name resolution algorithm. The end result of the algorithm is `ItemMap`: a
2//! map which maps each module to its scope: the set of items, visible in the
3//! module. That is, we only resolve imports here, name resolution of item
4//! bodies will be done in a separate step.
5//!
6//! Like Rustc, we use an iterative per-crate algorithm: we start with scopes
7//! containing only directly defined items, and then iteratively resolve
8//! imports.
9//!
10//! To make this work nicely in the IDE scenarios, we place `InputModuleItems`
11//! in between raw syntax and name resolution. `InputModuleItems` are computed
12//! using only the module's syntax, and it is all directly defined items plus
13//! imports. The plan is to make `InputModuleItems` independent of local
14//! modifications (that is, typing inside a function should not change IMIs),
15//! such that the results of name resolution can be preserved unless the module
16//! structure itself is modified.
17use std::{
18 sync::Arc,
19};
20
21use rustc_hash::FxHashMap;
22use ra_syntax::{
23 TextRange,
24 SmolStr, SyntaxKind::{self, *},
25 ast::{self, AstNode}
26};
27use ra_db::SourceRootId;
28
29use crate::{
30 Cancelable, FileId,
31 DefId, DefLoc,
32 SourceItemId, SourceFileItemId, SourceFileItems,
33 Path, PathKind,
34 HirDatabase,
35 module::{ModuleId, ModuleTree},
36};
37
38/// Item map is the result of the name resolution. Item map contains, for each
39/// module, the set of visible items.
40#[derive(Default, Debug, PartialEq, Eq)]
41pub struct ItemMap {
42 pub per_module: FxHashMap<ModuleId, ModuleScope>,
43}
44
45#[derive(Debug, Default, PartialEq, Eq, Clone)]
46pub struct ModuleScope {
47 items: FxHashMap<SmolStr, Resolution>,
48}
49
50impl ModuleScope {
51 pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a SmolStr, &Resolution)> + 'a {
52 self.items.iter()
53 }
54 pub fn get(&self, name: &SmolStr) -> Option<&Resolution> {
55 self.items.get(name)
56 }
57}
58
59/// A set of items and imports declared inside a module, without relation to
60/// other modules.
61///
62/// This stands in-between raw syntax and name resolution and allows us to avoid
63/// recomputing name res: if `InputModuleItems` are the same, we can avoid
64/// running name resolution.
65#[derive(Debug, Default, PartialEq, Eq)]
66pub struct InputModuleItems {
67 items: Vec<ModuleItem>,
68 imports: Vec<Import>,
69}
70
71#[derive(Debug, PartialEq, Eq)]
72struct ModuleItem {
73 id: SourceFileItemId,
74 name: SmolStr,
75 kind: SyntaxKind,
76 vis: Vis,
77}
78
79#[derive(Debug, PartialEq, Eq)]
80enum Vis {
81 // Priv,
82 Other,
83}
84
85#[derive(Debug, Clone, PartialEq, Eq)]
86struct Import {
87 path: Path,
88 kind: ImportKind,
89}
90
91#[derive(Debug, Clone, Copy, PartialEq, Eq)]
92pub struct NamedImport {
93 pub file_item_id: SourceFileItemId,
94 pub relative_range: TextRange,
95}
96
97impl NamedImport {
98 pub fn range(&self, db: &impl HirDatabase, file_id: FileId) -> TextRange {
99 let source_item_id = SourceItemId {
100 file_id,
101 item_id: self.file_item_id,
102 };
103 let syntax = db.file_item(source_item_id);
104 let offset = syntax.borrowed().range().start();
105 self.relative_range + offset
106 }
107}
108
109#[derive(Debug, Clone, PartialEq, Eq)]
110enum ImportKind {
111 Glob,
112 Named(NamedImport),
113}
114
115/// Resolution is basically `DefId` atm, but it should account for stuff like
116/// multiple namespaces, ambiguity and errors.
117#[derive(Debug, Clone, PartialEq, Eq)]
118pub struct Resolution {
119 /// None for unresolved
120 pub def_id: Option<DefId>,
121 /// ident by which this is imported into local scope.
122 pub import: Option<NamedImport>,
123}
124
125// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
126// enum Namespace {
127// Types,
128// Values,
129// }
130
131// #[derive(Debug)]
132// struct PerNs<T> {
133// types: Option<T>,
134// values: Option<T>,
135// }
136
137impl InputModuleItems {
138 pub(crate) fn new<'a>(
139 file_items: &SourceFileItems,
140 items: impl Iterator<Item = ast::ModuleItem<'a>>,
141 ) -> InputModuleItems {
142 let mut res = InputModuleItems::default();
143 for item in items {
144 res.add_item(file_items, item);
145 }
146 res
147 }
148
149 fn add_item(&mut self, file_items: &SourceFileItems, item: ast::ModuleItem) -> Option<()> {
150 match item {
151 ast::ModuleItem::StructDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
152 ast::ModuleItem::EnumDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
153 ast::ModuleItem::FnDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
154 ast::ModuleItem::TraitDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
155 ast::ModuleItem::TypeDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
156 ast::ModuleItem::ImplItem(_) => {
157 // impls don't define items
158 }
159 ast::ModuleItem::UseItem(it) => self.add_use_item(file_items, it),
160 ast::ModuleItem::ExternCrateItem(_) => {
161 // TODO
162 }
163 ast::ModuleItem::ConstDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
164 ast::ModuleItem::StaticDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
165 ast::ModuleItem::Module(it) => self.items.push(ModuleItem::new(file_items, it)?),
166 }
167 Some(())
168 }
169
170 fn add_use_item(&mut self, file_items: &SourceFileItems, item: ast::UseItem) {
171 let file_item_id = file_items.id_of(item.syntax());
172 let start_offset = item.syntax().range().start();
173 Path::expand_use_item(item, |path, range| {
174 let kind = match range {
175 None => ImportKind::Glob,
176 Some(range) => ImportKind::Named(NamedImport {
177 file_item_id,
178 relative_range: range - start_offset,
179 }),
180 };
181 self.imports.push(Import { kind, path })
182 })
183 }
184}
185
186impl ModuleItem {
187 fn new<'a>(file_items: &SourceFileItems, item: impl ast::NameOwner<'a>) -> Option<ModuleItem> {
188 let name = item.name()?.text();
189 let kind = item.syntax().kind();
190 let vis = Vis::Other;
191 let id = file_items.id_of(item.syntax());
192 let res = ModuleItem {
193 id,
194 name,
195 kind,
196 vis,
197 };
198 Some(res)
199 }
200}
201
202pub(crate) struct Resolver<'a, DB> {
203 pub(crate) db: &'a DB,
204 pub(crate) input: &'a FxHashMap<ModuleId, Arc<InputModuleItems>>,
205 pub(crate) source_root: SourceRootId,
206 pub(crate) module_tree: Arc<ModuleTree>,
207 pub(crate) result: ItemMap,
208}
209
210impl<'a, DB> Resolver<'a, DB>
211where
212 DB: HirDatabase,
213{
214 pub(crate) fn resolve(mut self) -> Cancelable<ItemMap> {
215 for (&module_id, items) in self.input.iter() {
216 self.populate_module(module_id, items)
217 }
218
219 for &module_id in self.input.keys() {
220 self.db.check_canceled()?;
221 self.resolve_imports(module_id);
222 }
223 Ok(self.result)
224 }
225
226 fn populate_module(&mut self, module_id: ModuleId, input: &InputModuleItems) {
227 let file_id = module_id.source(&self.module_tree).file_id();
228
229 let mut module_items = ModuleScope::default();
230
231 for import in input.imports.iter() {
232 if let Some(name) = import.path.segments.iter().last() {
233 if let ImportKind::Named(import) = import.kind {
234 module_items.items.insert(
235 name.clone(),
236 Resolution {
237 def_id: None,
238 import: Some(import),
239 },
240 );
241 }
242 }
243 }
244
245 for item in input.items.iter() {
246 if item.kind == MODULE {
247 // handle submodules separatelly
248 continue;
249 }
250 let def_loc = DefLoc::Item {
251 source_item_id: SourceItemId {
252 file_id,
253 item_id: item.id,
254 },
255 };
256 let def_id = def_loc.id(self.db);
257 let resolution = Resolution {
258 def_id: Some(def_id),
259 import: None,
260 };
261 module_items.items.insert(item.name.clone(), resolution);
262 }
263
264 for (name, mod_id) in module_id.children(&self.module_tree) {
265 let def_loc = DefLoc::Module {
266 id: mod_id,
267 source_root: self.source_root,
268 };
269 let def_id = def_loc.id(self.db);
270 let resolution = Resolution {
271 def_id: Some(def_id),
272 import: None,
273 };
274 module_items.items.insert(name, resolution);
275 }
276
277 self.result.per_module.insert(module_id, module_items);
278 }
279
280 fn resolve_imports(&mut self, module_id: ModuleId) {
281 for import in self.input[&module_id].imports.iter() {
282 self.resolve_import(module_id, import);
283 }
284 }
285
286 fn resolve_import(&mut self, module_id: ModuleId, import: &Import) {
287 let ptr = match import.kind {
288 ImportKind::Glob => return,
289 ImportKind::Named(ptr) => ptr,
290 };
291
292 let mut curr = match import.path.kind {
293 // TODO: handle extern crates
294 PathKind::Plain => return,
295 PathKind::Self_ => module_id,
296 PathKind::Super => {
297 match module_id.parent(&self.module_tree) {
298 Some(it) => it,
299 // TODO: error
300 None => return,
301 }
302 }
303 PathKind::Crate => module_id.crate_root(&self.module_tree),
304 };
305
306 for (i, name) in import.path.segments.iter().enumerate() {
307 let is_last = i == import.path.segments.len() - 1;
308
309 let def_id = match self.result.per_module[&curr].items.get(name) {
310 None => return,
311 Some(res) => match res.def_id {
312 Some(it) => it,
313 None => return,
314 },
315 };
316
317 if !is_last {
318 curr = match def_id.loc(self.db) {
319 DefLoc::Module { id, .. } => id,
320 _ => return,
321 }
322 } else {
323 self.update(module_id, |items| {
324 let res = Resolution {
325 def_id: Some(def_id),
326 import: Some(ptr),
327 };
328 items.items.insert(name.clone(), res);
329 })
330 }
331 }
332 }
333
334 fn update(&mut self, module_id: ModuleId, f: impl FnOnce(&mut ModuleScope)) {
335 let module_items = self.result.per_module.get_mut(&module_id).unwrap();
336 f(module_items)
337 }
338}
339
340#[cfg(test)]
341mod tests {
342 use std::sync::Arc;
343
344 use salsa::Database;
345 use ra_db::FilesDatabase;
346 use ra_syntax::SmolStr;
347
348 use crate::{
349 self as hir,
350 db::HirDatabase,
351 mock::MockDatabase,
352};
353
354 fn item_map(fixture: &str) -> (Arc<hir::ItemMap>, hir::ModuleId) {
355 let (db, pos) = MockDatabase::with_position(fixture);
356 let source_root = db.file_source_root(pos.file_id);
357 let module = hir::Module::guess_from_position(&db, pos).unwrap().unwrap();
358 let module_id = module.module_id;
359 (db.item_map(source_root).unwrap(), module_id)
360 }
361
362 #[test]
363 fn test_item_map() {
364 let (item_map, module_id) = item_map(
365 "
366 //- /lib.rs
367 mod foo;
368
369 use crate::foo::bar::Baz;
370 <|>
371
372 //- /foo/mod.rs
373 pub mod bar;
374
375 //- /foo/bar.rs
376 pub struct Baz;
377 ",
378 );
379 let name = SmolStr::from("Baz");
380 let resolution = &item_map.per_module[&module_id].items[&name];
381 assert!(resolution.def_id.is_some());
382 }
383
384 #[test]
385 fn typing_inside_a_function_should_not_invalidate_item_map() {
386 let (mut db, pos) = MockDatabase::with_position(
387 "
388 //- /lib.rs
389 mod foo;<|>
390
391 use crate::foo::bar::Baz;
392
393 fn foo() -> i32 {
394 1 + 1
395 }
396 //- /foo/mod.rs
397 pub mod bar;
398
399 //- /foo/bar.rs
400 pub struct Baz;
401 ",
402 );
403 let source_root = db.file_source_root(pos.file_id);
404 {
405 let events = db.log_executed(|| {
406 db.item_map(source_root).unwrap();
407 });
408 assert!(format!("{:?}", events).contains("item_map"))
409 }
410
411 let new_text = "
412 mod foo;
413
414 use crate::foo::bar::Baz;
415
416 fn foo() -> i32 { 92 }
417 "
418 .to_string();
419
420 db.query_mut(ra_db::FileTextQuery)
421 .set(pos.file_id, Arc::new(new_text));
422
423 {
424 let events = db.log_executed(|| {
425 db.item_map(source_root).unwrap();
426 });
427 assert!(
428 !format!("{:?}", events).contains("_item_map"),
429 "{:#?}",
430 events
431 )
432 }
433 }
434}
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs
new file mode 100644
index 000000000..4a2e427cd
--- /dev/null
+++ b/crates/ra_hir/src/path.rs
@@ -0,0 +1,148 @@
1use ra_syntax::{SmolStr, ast, AstNode, TextRange};
2
3#[derive(Debug, Clone, PartialEq, Eq)]
4pub struct Path {
5 pub kind: PathKind,
6 pub segments: Vec<SmolStr>,
7}
8
9#[derive(Debug, Clone, Copy, PartialEq, Eq)]
10pub enum PathKind {
11 Plain,
12 Self_,
13 Super,
14 Crate,
15}
16
17impl Path {
18 /// Calls `cb` with all paths, represented by this use item.
19 pub fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option<TextRange>)) {
20 if let Some(tree) = item.use_tree() {
21 expand_use_tree(None, tree, &mut cb);
22 }
23 }
24
25 /// Converts an `ast::Path` to `Path`. Works with use trees.
26 pub fn from_ast(mut path: ast::Path) -> Option<Path> {
27 let mut kind = PathKind::Plain;
28 let mut segments = Vec::new();
29 loop {
30 let segment = path.segment()?;
31 match segment.kind()? {
32 ast::PathSegmentKind::Name(name) => segments.push(name.text()),
33 ast::PathSegmentKind::CrateKw => {
34 kind = PathKind::Crate;
35 break;
36 }
37 ast::PathSegmentKind::SelfKw => {
38 kind = PathKind::Self_;
39 break;
40 }
41 ast::PathSegmentKind::SuperKw => {
42 kind = PathKind::Super;
43 break;
44 }
45 }
46 path = match qualifier(path) {
47 Some(it) => it,
48 None => break,
49 };
50 }
51 segments.reverse();
52 return Some(Path { kind, segments });
53
54 fn qualifier(path: ast::Path) -> Option<ast::Path> {
55 if let Some(q) = path.qualifier() {
56 return Some(q);
57 }
58 // TODO: this bottom up traversal is not too precise.
59 // Should we handle do a top-down analysiss, recording results?
60 let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
61 let use_tree = use_tree_list.parent_use_tree();
62 use_tree.path()
63 }
64 }
65
66 /// `true` is this path is a single identifier, like `foo`
67 pub fn is_ident(&self) -> bool {
68 self.kind == PathKind::Plain && self.segments.len() == 1
69 }
70}
71
72fn expand_use_tree(
73 prefix: Option<Path>,
74 tree: ast::UseTree,
75 cb: &mut impl FnMut(Path, Option<TextRange>),
76) {
77 if let Some(use_tree_list) = tree.use_tree_list() {
78 let prefix = match tree.path() {
79 None => prefix,
80 Some(path) => match convert_path(prefix, path) {
81 Some(it) => Some(it),
82 None => return, // TODO: report errors somewhere
83 },
84 };
85 for tree in use_tree_list.use_trees() {
86 expand_use_tree(prefix.clone(), tree, cb);
87 }
88 } else {
89 if let Some(ast_path) = tree.path() {
90 if let Some(path) = convert_path(prefix, ast_path) {
91 let range = if tree.has_star() {
92 None
93 } else {
94 let range = ast_path.segment().unwrap().syntax().range();
95 Some(range)
96 };
97 cb(path, range)
98 }
99 }
100 }
101}
102
103fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> {
104 let prefix = if let Some(qual) = path.qualifier() {
105 Some(convert_path(prefix, qual)?)
106 } else {
107 None
108 };
109 let segment = path.segment()?;
110 let res = match segment.kind()? {
111 ast::PathSegmentKind::Name(name) => {
112 let mut res = prefix.unwrap_or_else(|| Path {
113 kind: PathKind::Plain,
114 segments: Vec::with_capacity(1),
115 });
116 res.segments.push(name.text());
117 res
118 }
119 ast::PathSegmentKind::CrateKw => {
120 if prefix.is_some() {
121 return None;
122 }
123 Path {
124 kind: PathKind::Crate,
125 segments: Vec::new(),
126 }
127 }
128 ast::PathSegmentKind::SelfKw => {
129 if prefix.is_some() {
130 return None;
131 }
132 Path {
133 kind: PathKind::Self_,
134 segments: Vec::new(),
135 }
136 }
137 ast::PathSegmentKind::SuperKw => {
138 if prefix.is_some() {
139 return None;
140 }
141 Path {
142 kind: PathKind::Super,
143 segments: Vec::new(),
144 }
145 }
146 };
147 Some(res)
148}
diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs
new file mode 100644
index 000000000..6f602878c
--- /dev/null
+++ b/crates/ra_hir/src/query_definitions.rs
@@ -0,0 +1,154 @@
1use std::{
2 sync::Arc,
3 time::Instant,
4};
5
6use rustc_hash::FxHashMap;
7use ra_syntax::{
8 AstNode, SyntaxNode, SmolStr,
9 ast::{self, FnDef, FnDefNode, NameOwner, ModuleItemOwner}
10};
11use ra_db::{SourceRootId, FileId, Cancelable,};
12
13use crate::{
14 FnId,
15 SourceFileItems, SourceItemId,
16 db::HirDatabase,
17 function::FnScopes,
18 module::{
19 ModuleSource, ModuleSourceNode, ModuleId,
20 imp::Submodule,
21 nameres::{InputModuleItems, ItemMap, Resolver},
22 },
23};
24
25/// Resolve `FnId` to the corresponding `SyntaxNode`
26pub(super) fn fn_syntax(db: &impl HirDatabase, fn_id: FnId) -> FnDefNode {
27 let item_id = fn_id.loc(db);
28 let syntax = db.file_item(item_id);
29 FnDef::cast(syntax.borrowed()).unwrap().owned()
30}
31
32pub(super) fn fn_scopes(db: &impl HirDatabase, fn_id: FnId) -> Arc<FnScopes> {
33 let syntax = db.fn_syntax(fn_id);
34 let res = FnScopes::new(syntax.borrowed());
35 Arc::new(res)
36}
37
38pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> {
39 let source_file = db.source_file(file_id);
40 let source_file = source_file.borrowed();
41 let mut res = SourceFileItems::default();
42 source_file
43 .syntax()
44 .descendants()
45 .filter_map(ast::ModuleItem::cast)
46 .map(|it| it.syntax().owned())
47 .for_each(|it| {
48 res.alloc(it);
49 });
50 Arc::new(res)
51}
52
53pub(super) fn file_item(db: &impl HirDatabase, source_item_id: SourceItemId) -> SyntaxNode {
54 db.file_items(source_item_id.file_id)[source_item_id.item_id].clone()
55}
56
57pub(crate) fn submodules(
58 db: &impl HirDatabase,
59 source: ModuleSource,
60) -> Cancelable<Arc<Vec<Submodule>>> {
61 db.check_canceled()?;
62 let file_id = source.file_id();
63 let submodules = match source.resolve(db) {
64 ModuleSourceNode::SourceFile(it) => collect_submodules(db, file_id, it.borrowed()),
65 ModuleSourceNode::Module(it) => it
66 .borrowed()
67 .item_list()
68 .map(|it| collect_submodules(db, file_id, it))
69 .unwrap_or_else(Vec::new),
70 };
71 return Ok(Arc::new(submodules));
72
73 fn collect_submodules<'a>(
74 db: &impl HirDatabase,
75 file_id: FileId,
76 root: impl ast::ModuleItemOwner<'a>,
77 ) -> Vec<Submodule> {
78 modules(root)
79 .map(|(name, m)| {
80 if m.has_semi() {
81 Submodule::Declaration(name)
82 } else {
83 let src = ModuleSource::new_inline(db, file_id, m);
84 Submodule::Definition(name, src)
85 }
86 })
87 .collect()
88 }
89}
90
91pub(crate) fn modules<'a>(
92 root: impl ast::ModuleItemOwner<'a>,
93) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
94 root.items()
95 .filter_map(|item| match item {
96 ast::ModuleItem::Module(m) => Some(m),
97 _ => None,
98 })
99 .filter_map(|module| {
100 let name = module.name()?.text();
101 Some((name, module))
102 })
103}
104
105pub(super) fn input_module_items(
106 db: &impl HirDatabase,
107 source_root: SourceRootId,
108 module_id: ModuleId,
109) -> Cancelable<Arc<InputModuleItems>> {
110 let module_tree = db.module_tree(source_root)?;
111 let source = module_id.source(&module_tree);
112 let file_items = db.file_items(source.file_id());
113 let res = match source.resolve(db) {
114 ModuleSourceNode::SourceFile(it) => {
115 let items = it.borrowed().items();
116 InputModuleItems::new(&file_items, items)
117 }
118 ModuleSourceNode::Module(it) => {
119 let items = it
120 .borrowed()
121 .item_list()
122 .into_iter()
123 .flat_map(|it| it.items());
124 InputModuleItems::new(&file_items, items)
125 }
126 };
127 Ok(Arc::new(res))
128}
129
130pub(super) fn item_map(
131 db: &impl HirDatabase,
132 source_root: SourceRootId,
133) -> Cancelable<Arc<ItemMap>> {
134 let start = Instant::now();
135 let module_tree = db.module_tree(source_root)?;
136 let input = module_tree
137 .modules()
138 .map(|id| {
139 let items = db.input_module_items(source_root, id)?;
140 Ok((id, items))
141 })
142 .collect::<Cancelable<FxHashMap<_, _>>>()?;
143 let resolver = Resolver {
144 db: db,
145 input: &input,
146 source_root,
147 module_tree,
148 result: ItemMap::default(),
149 };
150 let res = resolver.resolve()?;
151 let elapsed = start.elapsed();
152 log::info!("item_map: {:?}", elapsed);
153 Ok(Arc::new(res))
154}
diff --git a/crates/ra_lsp_server/src/caps.rs b/crates/ra_lsp_server/src/caps.rs
index bcf857fce..560f64989 100644
--- a/crates/ra_lsp_server/src/caps.rs
+++ b/crates/ra_lsp_server/src/caps.rs
@@ -19,7 +19,7 @@ pub fn server_capabilities() -> ServerCapabilities {
19 hover_provider: Some(true), 19 hover_provider: Some(true),
20 completion_provider: Some(CompletionOptions { 20 completion_provider: Some(CompletionOptions {
21 resolve_provider: None, 21 resolve_provider: None,
22 trigger_characters: None, 22 trigger_characters: Some(vec![":".to_string()]),
23 }), 23 }),
24 signature_help_provider: Some(SignatureHelpOptions { 24 signature_help_provider: Some(SignatureHelpOptions {
25 trigger_characters: Some(vec!["(".to_string(), ",".to_string(), ")".to_string()]), 25 trigger_characters: Some(vec!["(".to_string(), ",".to_string(), ")".to_string()]),
diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs
index e5a2449c2..28368787c 100644
--- a/crates/ra_lsp_server/src/conv.rs
+++ b/crates/ra_lsp_server/src/conv.rs
@@ -49,10 +49,9 @@ impl ConvWith for Position {
49 type Output = TextUnit; 49 type Output = TextUnit;
50 50
51 fn conv_with(self, line_index: &LineIndex) -> TextUnit { 51 fn conv_with(self, line_index: &LineIndex) -> TextUnit {
52 // TODO: UTF-16
53 let line_col = LineCol { 52 let line_col = LineCol {
54 line: self.line as u32, 53 line: self.line as u32,
55 col: (self.character as u32).into(), 54 col_utf16: self.character as u32,
56 }; 55 };
57 line_index.offset(line_col) 56 line_index.offset(line_col)
58 } 57 }
@@ -64,8 +63,7 @@ impl ConvWith for TextUnit {
64 63
65 fn conv_with(self, line_index: &LineIndex) -> Position { 64 fn conv_with(self, line_index: &LineIndex) -> Position {
66 let line_col = line_index.line_col(self); 65 let line_col = line_index.line_col(self);
67 // TODO: UTF-16 66 Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16))
68 Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col)))
69 } 67 }
70} 68}
71 69
@@ -204,7 +202,7 @@ impl TryConvWith for SourceChange {
204 .unwrap_or(&[]); 202 .unwrap_or(&[]);
205 let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits); 203 let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits);
206 let position = 204 let position =
207 Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col))); 205 Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16));
208 Some(TextDocumentPositionParams { 206 Some(TextDocumentPositionParams {
209 text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?), 207 text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?),
210 position, 208 position,
@@ -247,12 +245,12 @@ fn translate_offset_with_edit(
247 if in_edit_line_col.line == 0 { 245 if in_edit_line_col.line == 0 {
248 LineCol { 246 LineCol {
249 line: edit_line_col.line, 247 line: edit_line_col.line,
250 col: edit_line_col.col + in_edit_line_col.col, 248 col_utf16: edit_line_col.col_utf16 + in_edit_line_col.col_utf16,
251 } 249 }
252 } else { 250 } else {
253 LineCol { 251 LineCol {
254 line: edit_line_col.line + in_edit_line_col.line, 252 line: edit_line_col.line + in_edit_line_col.line,
255 col: in_edit_line_col.col, 253 col_utf16: in_edit_line_col.col_utf16,
256 } 254 }
257 } 255 }
258} 256}
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs
index 26bcddd8e..8301a1044 100644
--- a/crates/ra_lsp_server/src/main.rs
+++ b/crates/ra_lsp_server/src/main.rs
@@ -65,27 +65,3 @@ fn main_inner() -> Result<()> {
65 info!("... IO is down"); 65 info!("... IO is down");
66 Ok(()) 66 Ok(())
67} 67}
68
69/*
70 (let ((backend (eglot-xref-backend)))
71 (mapcar
72 (lambda (xref)
73 (let ((loc (xref-item-location xref)))
74 (propertize
75 (concat
76 (when (xref-file-location-p loc)
77 (with-slots (file line column) loc
78 (format "%s:%s:%s:"
79 (propertize (file-relative-name file)
80 'face 'compilation-info)
81 (propertize (format "%s" line)
82 'face 'compilation-line
83 )
84 column)))
85 (xref-item-summary xref))
86 'xref xref)))
87 (xref-backend-apropos backend "Analysis"))
88 )
89
90
91*/
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index c872b0dc4..6d5622b15 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -9,7 +9,7 @@ use languageserver_types::{
9 WorkspaceEdit, ParameterInformation, SignatureInformation, Hover, HoverContents, 9 WorkspaceEdit, ParameterInformation, SignatureInformation, Hover, HoverContents,
10}; 10};
11use ra_analysis::{FileId, FoldKind, Query, RunnableKind, FilePosition}; 11use ra_analysis::{FileId, FoldKind, Query, RunnableKind, FilePosition};
12use ra_syntax::text_utils::contains_offset_nonstrict; 12use ra_syntax::{TextUnit, text_utils::contains_offset_nonstrict};
13use rustc_hash::FxHashMap; 13use rustc_hash::FxHashMap;
14use serde_json::to_value; 14use serde_json::to_value;
15 15
@@ -381,6 +381,28 @@ pub fn handle_completion(
381 let offset = params.position.conv_with(&line_index); 381 let offset = params.position.conv_with(&line_index);
382 FilePosition { file_id, offset } 382 FilePosition { file_id, offset }
383 }; 383 };
384 let completion_triggered_after_single_colon = {
385 let mut res = false;
386 if let Some(ctx) = params.context {
387 if ctx.trigger_character.unwrap_or_default() == ":" {
388 let source_file = world.analysis().file_syntax(position.file_id);
389 let syntax = source_file.syntax();
390 let text = syntax.text();
391 if let Some(next_char) = text.char_at(position.offset) {
392 let diff = TextUnit::of_char(next_char) + TextUnit::of_char(':');
393 let prev_char = position.offset - diff;
394 if text.char_at(prev_char) != Some(':') {
395 res = true;
396 }
397 }
398 }
399 }
400 res
401 };
402 if completion_triggered_after_single_colon {
403 return Ok(None);
404 }
405
384 let items = match world.analysis().completions(position)? { 406 let items = match world.analysis().completions(position)? {
385 None => return Ok(None), 407 None => return Ok(None),
386 Some(items) => items, 408 Some(items) => items,
@@ -545,10 +567,13 @@ pub fn handle_rename(world: ServerWorld, params: RenameParams) -> Result<Option<
545 let mut changes = HashMap::new(); 567 let mut changes = HashMap::new();
546 for r in refs { 568 for r in refs {
547 if let Ok(loc) = to_location(r.0, r.1, &world, &line_index) { 569 if let Ok(loc) = to_location(r.0, r.1, &world, &line_index) {
548 changes.entry(loc.uri).or_insert(Vec::new()).push(TextEdit { 570 changes
549 range: loc.range, 571 .entry(loc.uri)
550 new_text: params.new_name.clone(), 572 .or_insert_with(Vec::new)
551 }); 573 .push(TextEdit {
574 range: loc.range,
575 new_text: params.new_name.clone(),
576 });
552 } 577 }
553 } 578 }
554 579
diff --git a/crates/ra_lsp_server/src/main_loop/mod.rs b/crates/ra_lsp_server/src/main_loop/mod.rs
index 78d93741a..36f08be2f 100644
--- a/crates/ra_lsp_server/src/main_loop/mod.rs
+++ b/crates/ra_lsp_server/src/main_loop/mod.rs
@@ -168,9 +168,35 @@ fn main_loop_inner(
168 let workspaces = vec![ws]; 168 let workspaces = vec![ws];
169 feedback(internal_mode, "workspace loaded", msg_sender); 169 feedback(internal_mode, "workspace loaded", msg_sender);
170 for ws in workspaces.iter() { 170 for ws in workspaces.iter() {
171 for pkg in ws.packages().filter(|pkg| !pkg.is_member(ws)) { 171 // Add each library as constant input. If library is
172 debug!("sending root, {}", pkg.root(ws).to_path_buf().display()); 172 // within the workspace, don't treat it as a library.
173 fs_worker.send(pkg.root(ws).to_path_buf()); 173 //
174 // HACK: If source roots are nested, pick the outer one.
175
176 let mut roots = ws
177 .packages()
178 .filter(|pkg| !pkg.is_member(ws))
179 .filter_map(|pkg| {
180 let root = pkg.root(ws).to_path_buf();
181 if root.starts_with(&ws_root) {
182 None
183 } else {
184 Some(root)
185 }
186 })
187 .collect::<Vec<_>>();
188 roots.sort_by_key(|it| it.as_os_str().len());
189 let unique = roots
190 .iter()
191 .enumerate()
192 .filter(|&(idx, long)| {
193 !roots[..idx].iter().any(|short| long.starts_with(short))
194 })
195 .map(|(_idx, root)| root);
196
197 for root in unique {
198 debug!("sending root, {}", root.display());
199 fs_worker.send(root.to_owned());
174 } 200 }
175 } 201 }
176 state.set_workspaces(workspaces); 202 state.set_workspaces(workspaces);
diff --git a/crates/ra_lsp_server/src/path_map.rs b/crates/ra_lsp_server/src/path_map.rs
index 87eabf9be..02e54629c 100644
--- a/crates/ra_lsp_server/src/path_map.rs
+++ b/crates/ra_lsp_server/src/path_map.rs
@@ -43,7 +43,7 @@ impl PathMap {
43 (inserted, file_id) 43 (inserted, file_id)
44 } 44 }
45 pub fn get_id(&self, path: &Path) -> Option<FileId> { 45 pub fn get_id(&self, path: &Path) -> Option<FileId> {
46 self.path2id.get(path).map(|&id| id) 46 self.path2id.get(path).cloned()
47 } 47 }
48 pub fn get_path(&self, file_id: FileId) -> &Path { 48 pub fn get_path(&self, file_id: FileId) -> &Path {
49 self.id2path.get(&file_id).unwrap().as_path() 49 self.id2path.get(&file_id).unwrap().as_path()
@@ -79,6 +79,10 @@ impl FileResolver for PathMap {
79 let path = normalize(&path); 79 let path = normalize(&path);
80 self.get_id(&path) 80 self.get_id(&path)
81 } 81 }
82
83 fn debug_path(&self, file_id: FileId) -> Option<PathBuf> {
84 Some(self.get_path(file_id).to_owned())
85 }
82} 86}
83 87
84fn normalize(path: &Path) -> PathBuf { 88fn normalize(path: &Path) -> PathBuf {
diff --git a/crates/ra_lsp_server/src/server_world.rs b/crates/ra_lsp_server/src/server_world.rs
index 3e7670fcc..12faeb93a 100644
--- a/crates/ra_lsp_server/src/server_world.rs
+++ b/crates/ra_lsp_server/src/server_world.rs
@@ -140,7 +140,7 @@ impl ServerWorldState {
140 Ok(file_id) 140 Ok(file_id)
141 } 141 }
142 pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) { 142 pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) {
143 let mut crate_graph = CrateGraph::new(); 143 let mut crate_graph = CrateGraph::default();
144 ws.iter() 144 ws.iter()
145 .flat_map(|ws| { 145 .flat_map(|ws| {
146 ws.packages() 146 ws.packages()
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs
index 2e9ae263a..bf056131e 100644
--- a/crates/ra_syntax/src/ast/generated.rs
+++ b/crates/ra_syntax/src/ast/generated.rs
@@ -372,6 +372,80 @@ impl<R: TreeRoot<RaTypes>> BreakExprNode<R> {
372 372
373impl<'a> BreakExpr<'a> {} 373impl<'a> BreakExpr<'a> {}
374 374
375// Byte
376#[derive(Debug, Clone, Copy,)]
377pub struct ByteNode<R: TreeRoot<RaTypes> = OwnedRoot> {
378 pub(crate) syntax: SyntaxNode<R>,
379}
380pub type Byte<'a> = ByteNode<RefRoot<'a>>;
381
382impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ByteNode<R1>> for ByteNode<R2> {
383 fn eq(&self, other: &ByteNode<R1>) -> bool { self.syntax == other.syntax }
384}
385impl<R: TreeRoot<RaTypes>> Eq for ByteNode<R> {}
386impl<R: TreeRoot<RaTypes>> Hash for ByteNode<R> {
387 fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
388}
389
390impl<'a> AstNode<'a> for Byte<'a> {
391 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
392 match syntax.kind() {
393 BYTE => Some(Byte { syntax }),
394 _ => None,
395 }
396 }
397 fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
398}
399
400impl<R: TreeRoot<RaTypes>> ByteNode<R> {
401 pub fn borrowed(&self) -> Byte {
402 ByteNode { syntax: self.syntax.borrowed() }
403 }
404 pub fn owned(&self) -> ByteNode {
405 ByteNode { syntax: self.syntax.owned() }
406 }
407}
408
409
410impl<'a> Byte<'a> {}
411
412// ByteString
413#[derive(Debug, Clone, Copy,)]
414pub struct ByteStringNode<R: TreeRoot<RaTypes> = OwnedRoot> {
415 pub(crate) syntax: SyntaxNode<R>,
416}
417pub type ByteString<'a> = ByteStringNode<RefRoot<'a>>;
418
419impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ByteStringNode<R1>> for ByteStringNode<R2> {
420 fn eq(&self, other: &ByteStringNode<R1>) -> bool { self.syntax == other.syntax }
421}
422impl<R: TreeRoot<RaTypes>> Eq for ByteStringNode<R> {}
423impl<R: TreeRoot<RaTypes>> Hash for ByteStringNode<R> {
424 fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
425}
426
427impl<'a> AstNode<'a> for ByteString<'a> {
428 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
429 match syntax.kind() {
430 BYTE_STRING => Some(ByteString { syntax }),
431 _ => None,
432 }
433 }
434 fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
435}
436
437impl<R: TreeRoot<RaTypes>> ByteStringNode<R> {
438 pub fn borrowed(&self) -> ByteString {
439 ByteStringNode { syntax: self.syntax.borrowed() }
440 }
441 pub fn owned(&self) -> ByteStringNode {
442 ByteStringNode { syntax: self.syntax.owned() }
443 }
444}
445
446
447impl<'a> ByteString<'a> {}
448
375// CallExpr 449// CallExpr
376#[derive(Debug, Clone, Copy,)] 450#[derive(Debug, Clone, Copy,)]
377pub struct CallExprNode<R: TreeRoot<RaTypes> = OwnedRoot> { 451pub struct CallExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
diff --git a/crates/ra_syntax/src/ast/mod.rs b/crates/ra_syntax/src/ast/mod.rs
index f20714ede..91c67119f 100644
--- a/crates/ra_syntax/src/ast/mod.rs
+++ b/crates/ra_syntax/src/ast/mod.rs
@@ -134,6 +134,18 @@ impl<'a> Char<'a> {
134 } 134 }
135} 135}
136 136
137impl<'a> Byte<'a> {
138 pub fn text(&self) -> &SmolStr {
139 &self.syntax().leaf_text().unwrap()
140 }
141}
142
143impl<'a> ByteString<'a> {
144 pub fn text(&self) -> &SmolStr {
145 &self.syntax().leaf_text().unwrap()
146 }
147}
148
137impl<'a> String<'a> { 149impl<'a> String<'a> {
138 pub fn text(&self) -> &SmolStr { 150 pub fn text(&self) -> &SmolStr {
139 &self.syntax().leaf_text().unwrap() 151 &self.syntax().leaf_text().unwrap()
@@ -303,6 +315,12 @@ impl<'a> PathSegment<'a> {
303 } 315 }
304} 316}
305 317
318impl<'a> UseTree<'a> {
319 pub fn has_star(self) -> bool {
320 self.syntax().children().any(|it| it.kind() == STAR)
321 }
322}
323
306impl<'a> UseTreeList<'a> { 324impl<'a> UseTreeList<'a> {
307 pub fn parent_use_tree(self) -> UseTree<'a> { 325 pub fn parent_use_tree(self) -> UseTree<'a> {
308 self.syntax() 326 self.syntax()
diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron
index c3184667e..53cd2118f 100644
--- a/crates/ra_syntax/src/grammar.ron
+++ b/crates/ra_syntax/src/grammar.ron
@@ -412,6 +412,8 @@ Grammar(
412 "RangeExpr": (), 412 "RangeExpr": (),
413 "BinExpr": (), 413 "BinExpr": (),
414 "String": (), 414 "String": (),
415 "Byte": (),
416 "ByteString": (),
415 "Char": (), 417 "Char": (),
416 "Literal": (), 418 "Literal": (),
417 419
diff --git a/crates/ra_syntax/src/grammar/items/mod.rs b/crates/ra_syntax/src/grammar/items/mod.rs
index 06c6b5e6e..682266908 100644
--- a/crates/ra_syntax/src/grammar/items/mod.rs
+++ b/crates/ra_syntax/src/grammar/items/mod.rs
@@ -29,7 +29,7 @@ pub(super) enum ItemFlavor {
29 Trait, 29 Trait,
30} 30}
31 31
32const ITEM_RECOVERY_SET: TokenSet = token_set![ 32pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
33 FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW, 33 FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW,
34 CRATE_KW 34 CRATE_KW
35]; 35];
diff --git a/crates/ra_syntax/src/grammar/paths.rs b/crates/ra_syntax/src/grammar/paths.rs
index a35a339cc..33a11886c 100644
--- a/crates/ra_syntax/src/grammar/paths.rs
+++ b/crates/ra_syntax/src/grammar/paths.rs
@@ -78,7 +78,7 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) {
78 // use crate::foo; 78 // use crate::foo;
79 SELF_KW | SUPER_KW | CRATE_KW => p.bump(), 79 SELF_KW | SUPER_KW | CRATE_KW => p.bump(),
80 _ => { 80 _ => {
81 p.err_and_bump("expected identifier"); 81 p.err_recover("expected identifier", items::ITEM_RECOVERY_SET);
82 } 82 }
83 }; 83 };
84 } 84 }
diff --git a/crates/ra_syntax/src/reparsing.rs b/crates/ra_syntax/src/reparsing.rs
index d48133166..ddcb8f6f6 100644
--- a/crates/ra_syntax/src/reparsing.rs
+++ b/crates/ra_syntax/src/reparsing.rs
@@ -186,8 +186,10 @@ mod tests {
186 186
187 fn do_check<F>(before: &str, replace_with: &str, reparser: F) 187 fn do_check<F>(before: &str, replace_with: &str, reparser: F)
188 where 188 where
189 for<'a> F: Fn(SyntaxNodeRef<'a>, &AtomEdit) 189 for<'a> F: Fn(
190 -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>, 190 SyntaxNodeRef<'a>,
191 &AtomEdit,
192 ) -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>,
191 { 193 {
192 let (range, before) = extract_range(before); 194 let (range, before) = extract_range(before);
193 let after = replace_range(before.clone(), range, replace_with); 195 let after = replace_range(before.clone(), range, replace_with);
diff --git a/crates/ra_syntax/src/string_lexing.rs b/crates/ra_syntax/src/string_lexing.rs
deleted file mode 100644
index d613bb042..000000000
--- a/crates/ra_syntax/src/string_lexing.rs
+++ /dev/null
@@ -1,414 +0,0 @@
1use self::CharComponentKind::*;
2use rowan::{TextRange, TextUnit};
3
4pub fn parse_string_literal(src: &str) -> StringComponentIterator {
5 StringComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11#[derive(Debug, Eq, PartialEq, Clone)]
12pub struct StringComponent {
13 pub range: TextRange,
14 pub kind: StringComponentKind,
15}
16
17impl StringComponent {
18 fn new(range: TextRange, kind: StringComponentKind) -> StringComponent {
19 StringComponent { range, kind }
20 }
21}
22
23#[derive(Debug, Eq, PartialEq, Clone)]
24pub enum StringComponentKind {
25 IgnoreNewline,
26 Char(CharComponentKind),
27}
28
29pub struct StringComponentIterator<'a> {
30 parser: Parser<'a>,
31 pub has_closing_quote: bool,
32}
33
34impl<'a> Iterator for StringComponentIterator<'a> {
35 type Item = StringComponent;
36 fn next(&mut self) -> Option<StringComponent> {
37 if self.parser.pos == 0 {
38 assert!(
39 self.parser.advance() == '"',
40 "string literal should start with double quotes"
41 );
42 }
43
44 if let Some(component) = self.parser.parse_string_component() {
45 return Some(component);
46 }
47
48 // We get here when there are no char components left to parse
49 if self.parser.peek() == Some('"') {
50 self.parser.advance();
51 self.has_closing_quote = true;
52 }
53
54 assert!(
55 self.parser.peek() == None,
56 "string literal should leave no unparsed input: src = {}, pos = {}, length = {}",
57 self.parser.src,
58 self.parser.pos,
59 self.parser.src.len()
60 );
61
62 None
63 }
64}
65
66pub fn parse_char_literal(src: &str) -> CharComponentIterator {
67 CharComponentIterator {
68 parser: Parser::new(src),
69 has_closing_quote: false,
70 }
71}
72
73#[derive(Debug, Eq, PartialEq, Clone)]
74pub struct CharComponent {
75 pub range: TextRange,
76 pub kind: CharComponentKind,
77}
78
79impl CharComponent {
80 fn new(range: TextRange, kind: CharComponentKind) -> CharComponent {
81 CharComponent { range, kind }
82 }
83}
84
85#[derive(Debug, Eq, PartialEq, Clone)]
86pub enum CharComponentKind {
87 CodePoint,
88 AsciiEscape,
89 AsciiCodeEscape,
90 UnicodeEscape,
91}
92
93pub struct CharComponentIterator<'a> {
94 parser: Parser<'a>,
95 pub has_closing_quote: bool,
96}
97
98impl<'a> Iterator for CharComponentIterator<'a> {
99 type Item = CharComponent;
100 fn next(&mut self) -> Option<CharComponent> {
101 if self.parser.pos == 0 {
102 assert!(
103 self.parser.advance() == '\'',
104 "char literal should start with a quote"
105 );
106 }
107
108 if let Some(component) = self.parser.parse_char_component() {
109 return Some(component);
110 }
111
112 // We get here when there are no char components left to parse
113 if self.parser.peek() == Some('\'') {
114 self.parser.advance();
115 self.has_closing_quote = true;
116 }
117
118 assert!(
119 self.parser.peek() == None,
120 "char literal should leave no unparsed input: src = {}, pos = {}, length = {}",
121 self.parser.src,
122 self.parser.pos,
123 self.parser.src.len()
124 );
125
126 None
127 }
128}
129
130pub struct Parser<'a> {
131 src: &'a str,
132 pos: usize,
133}
134
135impl<'a> Parser<'a> {
136 pub fn new(src: &'a str) -> Parser<'a> {
137 Parser { src, pos: 0 }
138 }
139
140 // Utility methods
141
142 pub fn peek(&self) -> Option<char> {
143 if self.pos == self.src.len() {
144 return None;
145 }
146
147 self.src[self.pos..].chars().next()
148 }
149
150 pub fn advance(&mut self) -> char {
151 let next = self
152 .peek()
153 .expect("cannot advance if end of input is reached");
154 self.pos += next.len_utf8();
155 next
156 }
157
158 pub fn skip_whitespace(&mut self) {
159 while self.peek().map(|c| c.is_whitespace()) == Some(true) {
160 self.advance();
161 }
162 }
163
164 pub fn get_pos(&self) -> TextUnit {
165 (self.pos as u32).into()
166 }
167
168 // Char parsing methods
169
170 fn parse_unicode_escape(&mut self, start: TextUnit) -> CharComponent {
171 match self.peek() {
172 Some('{') => {
173 self.advance();
174
175 // Parse anything until we reach `}`
176 while let Some(next) = self.peek() {
177 self.advance();
178 if next == '}' {
179 break;
180 }
181 }
182
183 let end = self.get_pos();
184 CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
185 }
186 Some(_) | None => {
187 let end = self.get_pos();
188 CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
189 }
190 }
191 }
192
193 fn parse_ascii_code_escape(&mut self, start: TextUnit) -> CharComponent {
194 let code_start = self.get_pos();
195 while let Some(next) = self.peek() {
196 if next == '\'' || (self.get_pos() - code_start == 2.into()) {
197 break;
198 }
199
200 self.advance();
201 }
202
203 let end = self.get_pos();
204 CharComponent::new(TextRange::from_to(start, end), AsciiCodeEscape)
205 }
206
207 fn parse_escape(&mut self, start: TextUnit) -> CharComponent {
208 if self.peek().is_none() {
209 return CharComponent::new(TextRange::from_to(start, start), AsciiEscape);
210 }
211
212 let next = self.advance();
213 let end = self.get_pos();
214 let range = TextRange::from_to(start, end);
215 match next {
216 'x' => self.parse_ascii_code_escape(start),
217 'u' => self.parse_unicode_escape(start),
218 _ => CharComponent::new(range, AsciiEscape),
219 }
220 }
221
222 pub fn parse_char_component(&mut self) -> Option<CharComponent> {
223 let next = self.peek()?;
224
225 // Ignore character close
226 if next == '\'' {
227 return None;
228 }
229
230 let start = self.get_pos();
231 self.advance();
232
233 if next == '\\' {
234 Some(self.parse_escape(start))
235 } else {
236 let end = self.get_pos();
237 Some(CharComponent::new(
238 TextRange::from_to(start, end),
239 CodePoint,
240 ))
241 }
242 }
243
244 pub fn parse_ignore_newline(&mut self, start: TextUnit) -> Option<StringComponent> {
245 // In string literals, when a `\` occurs immediately before the newline, the `\`,
246 // the newline, and all whitespace at the beginning of the next line are ignored
247 match self.peek() {
248 Some('\n') | Some('\r') => {
249 self.skip_whitespace();
250 Some(StringComponent::new(
251 TextRange::from_to(start, self.get_pos()),
252 StringComponentKind::IgnoreNewline,
253 ))
254 }
255 _ => None,
256 }
257 }
258
259 pub fn parse_string_component(&mut self) -> Option<StringComponent> {
260 let next = self.peek()?;
261
262 // Ignore string close
263 if next == '"' {
264 return None;
265 }
266
267 let start = self.get_pos();
268 self.advance();
269
270 if next == '\\' {
271 // Strings can use `\` to ignore newlines, so we first try to parse one of those
272 // before falling back to parsing char escapes
273 self.parse_ignore_newline(start).or_else(|| {
274 let char_component = self.parse_escape(start);
275 Some(StringComponent::new(
276 char_component.range,
277 StringComponentKind::Char(char_component.kind),
278 ))
279 })
280 } else {
281 let end = self.get_pos();
282 Some(StringComponent::new(
283 TextRange::from_to(start, end),
284 StringComponentKind::Char(CodePoint),
285 ))
286 }
287 }
288}
289
290#[cfg(test)]
291mod tests {
292 use super::*;
293
294 fn parse(src: &str) -> (bool, Vec<CharComponent>) {
295 let component_iterator = &mut super::parse_char_literal(src);
296 let components: Vec<_> = component_iterator.collect();
297 (component_iterator.has_closing_quote, components)
298 }
299
300 fn unclosed_char_component(src: &str) -> CharComponent {
301 let (has_closing_quote, components) = parse(src);
302 assert!(!has_closing_quote, "char should not have closing quote");
303 assert!(components.len() == 1);
304 components[0].clone()
305 }
306
307 fn closed_char_component(src: &str) -> CharComponent {
308 let (has_closing_quote, components) = parse(src);
309 assert!(has_closing_quote, "char should have closing quote");
310 assert!(
311 components.len() == 1,
312 "Literal: {}\nComponents: {:#?}",
313 src,
314 components
315 );
316 components[0].clone()
317 }
318
319 fn closed_char_components(src: &str) -> Vec<CharComponent> {
320 let (has_closing_quote, components) = parse(src);
321 assert!(has_closing_quote, "char should have closing quote");
322 components
323 }
324
325 fn range_closed(src: &str) -> TextRange {
326 TextRange::from_to(1.into(), (src.len() as u32 - 1).into())
327 }
328
329 fn range_unclosed(src: &str) -> TextRange {
330 TextRange::from_to(1.into(), (src.len() as u32).into())
331 }
332
333 #[test]
334 fn test_unicode_escapes() {
335 let unicode_escapes = &[r"{DEAD}", "{BEEF}", "{FF}", "{}", ""];
336 for escape in unicode_escapes {
337 let escape_sequence = format!(r"'\u{}'", escape);
338 let component = closed_char_component(&escape_sequence);
339 let expected_range = range_closed(&escape_sequence);
340 assert_eq!(component.kind, CharComponentKind::UnicodeEscape);
341 assert_eq!(component.range, expected_range);
342 }
343 }
344
345 #[test]
346 fn test_unicode_escapes_unclosed() {
347 let unicode_escapes = &["{DEAD", "{BEEF", "{FF"];
348 for escape in unicode_escapes {
349 let escape_sequence = format!(r"'\u{}'", escape);
350 let component = unclosed_char_component(&escape_sequence);
351 let expected_range = range_unclosed(&escape_sequence);
352 assert_eq!(component.kind, CharComponentKind::UnicodeEscape);
353 assert_eq!(component.range, expected_range);
354 }
355 }
356
357 #[test]
358 fn test_empty_char() {
359 let (has_closing_quote, components) = parse("''");
360 assert!(has_closing_quote, "char should have closing quote");
361 assert!(components.len() == 0);
362 }
363
364 #[test]
365 fn test_unclosed_char() {
366 let component = unclosed_char_component("'a");
367 assert!(component.kind == CodePoint);
368 assert!(component.range == TextRange::from_to(1.into(), 2.into()));
369 }
370
371 #[test]
372 fn test_digit_escapes() {
373 let literals = &[r"", r"5", r"55"];
374
375 for literal in literals {
376 let lit_text = format!(r"'\x{}'", literal);
377 let component = closed_char_component(&lit_text);
378 assert!(component.kind == CharComponentKind::AsciiCodeEscape);
379 assert!(component.range == range_closed(&lit_text));
380 }
381
382 // More than 2 digits starts a new codepoint
383 let components = closed_char_components(r"'\x555'");
384 assert!(components.len() == 2);
385 assert!(components[1].kind == CharComponentKind::CodePoint);
386 }
387
388 #[test]
389 fn test_ascii_escapes() {
390 let literals = &[
391 r"\'", "\\\"", // equivalent to \"
392 r"\n", r"\r", r"\t", r"\\", r"\0",
393 ];
394
395 for literal in literals {
396 let lit_text = format!("'{}'", literal);
397 let component = closed_char_component(&lit_text);
398 assert!(component.kind == CharComponentKind::AsciiEscape);
399 assert!(component.range == range_closed(&lit_text));
400 }
401 }
402
403 #[test]
404 fn test_no_escapes() {
405 let literals = &['"', 'n', 'r', 't', '0', 'x', 'u'];
406
407 for &literal in literals {
408 let lit_text = format!("'{}'", literal);
409 let component = closed_char_component(&lit_text);
410 assert!(component.kind == CharComponentKind::CodePoint);
411 assert!(component.range == range_closed(&lit_text));
412 }
413 }
414}
diff --git a/crates/ra_syntax/src/string_lexing/byte.rs b/crates/ra_syntax/src/string_lexing/byte.rs
new file mode 100644
index 000000000..24424349c
--- /dev/null
+++ b/crates/ra_syntax/src/string_lexing/byte.rs
@@ -0,0 +1,51 @@
1use super::parser::Parser;
2use super::CharComponent;
3
4pub fn parse_byte_literal(src: &str) -> ByteComponentIterator {
5 ByteComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11pub struct ByteComponentIterator<'a> {
12 parser: Parser<'a>,
13 pub has_closing_quote: bool,
14}
15
16impl<'a> Iterator for ByteComponentIterator<'a> {
17 type Item = CharComponent;
18 fn next(&mut self) -> Option<CharComponent> {
19 if self.parser.pos == 0 {
20 assert!(
21 self.parser.advance() == 'b',
22 "Byte literal should start with a `b`"
23 );
24
25 assert!(
26 self.parser.advance() == '\'',
27 "Byte literal should start with a `b`, followed by a quote"
28 );
29 }
30
31 if let Some(component) = self.parser.parse_char_component() {
32 return Some(component);
33 }
34
35 // We get here when there are no char components left to parse
36 if self.parser.peek() == Some('\'') {
37 self.parser.advance();
38 self.has_closing_quote = true;
39 }
40
41 assert!(
42 self.parser.peek() == None,
43 "byte literal should leave no unparsed input: src = {}, pos = {}, length = {}",
44 self.parser.src,
45 self.parser.pos,
46 self.parser.src.len()
47 );
48
49 None
50 }
51}
diff --git a/crates/ra_syntax/src/string_lexing/byte_string.rs b/crates/ra_syntax/src/string_lexing/byte_string.rs
new file mode 100644
index 000000000..5b6dda760
--- /dev/null
+++ b/crates/ra_syntax/src/string_lexing/byte_string.rs
@@ -0,0 +1,51 @@
1use super::parser::Parser;
2use super::StringComponent;
3
4pub fn parse_byte_string_literal(src: &str) -> ByteStringComponentIterator {
5 ByteStringComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11pub struct ByteStringComponentIterator<'a> {
12 parser: Parser<'a>,
13 pub has_closing_quote: bool,
14}
15
16impl<'a> Iterator for ByteStringComponentIterator<'a> {
17 type Item = StringComponent;
18 fn next(&mut self) -> Option<StringComponent> {
19 if self.parser.pos == 0 {
20 assert!(
21 self.parser.advance() == 'b',
22 "byte string literal should start with a `b`"
23 );
24
25 assert!(
26 self.parser.advance() == '"',
27 "byte string literal should start with a `b`, followed by double quotes"
28 );
29 }
30
31 if let Some(component) = self.parser.parse_string_component() {
32 return Some(component);
33 }
34
35 // We get here when there are no char components left to parse
36 if self.parser.peek() == Some('"') {
37 self.parser.advance();
38 self.has_closing_quote = true;
39 }
40
41 assert!(
42 self.parser.peek() == None,
43 "byte string literal should leave no unparsed input: src = {}, pos = {}, length = {}",
44 self.parser.src,
45 self.parser.pos,
46 self.parser.src.len()
47 );
48
49 None
50 }
51}
diff --git a/crates/ra_syntax/src/string_lexing/char.rs b/crates/ra_syntax/src/string_lexing/char.rs
new file mode 100644
index 000000000..885c03b14
--- /dev/null
+++ b/crates/ra_syntax/src/string_lexing/char.rs
@@ -0,0 +1,176 @@
1use super::parser::Parser;
2use super::CharComponent;
3
4pub fn parse_char_literal(src: &str) -> CharComponentIterator {
5 CharComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11pub struct CharComponentIterator<'a> {
12 parser: Parser<'a>,
13 pub has_closing_quote: bool,
14}
15
16impl<'a> Iterator for CharComponentIterator<'a> {
17 type Item = CharComponent;
18 fn next(&mut self) -> Option<CharComponent> {
19 if self.parser.pos == 0 {
20 assert!(
21 self.parser.advance() == '\'',
22 "char literal should start with a quote"
23 );
24 }
25
26 if let Some(component) = self.parser.parse_char_component() {
27 return Some(component);
28 }
29
30 // We get here when there are no char components left to parse
31 if self.parser.peek() == Some('\'') {
32 self.parser.advance();
33 self.has_closing_quote = true;
34 }
35
36 assert!(
37 self.parser.peek() == None,
38 "char literal should leave no unparsed input: src = {}, pos = {}, length = {}",
39 self.parser.src,
40 self.parser.pos,
41 self.parser.src.len()
42 );
43
44 None
45 }
46}
47
48#[cfg(test)]
49mod tests {
50 use rowan::TextRange;
51 use crate::string_lexing::{
52 CharComponent,
53 CharComponentKind::*,
54};
55
56 fn parse(src: &str) -> (bool, Vec<CharComponent>) {
57 let component_iterator = &mut super::parse_char_literal(src);
58 let components: Vec<_> = component_iterator.collect();
59 (component_iterator.has_closing_quote, components)
60 }
61
62 fn unclosed_char_component(src: &str) -> CharComponent {
63 let (has_closing_quote, components) = parse(src);
64 assert!(!has_closing_quote, "char should not have closing quote");
65 assert!(components.len() == 1);
66 components[0].clone()
67 }
68
69 fn closed_char_component(src: &str) -> CharComponent {
70 let (has_closing_quote, components) = parse(src);
71 assert!(has_closing_quote, "char should have closing quote");
72 assert!(
73 components.len() == 1,
74 "Literal: {}\nComponents: {:#?}",
75 src,
76 components
77 );
78 components[0].clone()
79 }
80
81 fn closed_char_components(src: &str) -> Vec<CharComponent> {
82 let (has_closing_quote, components) = parse(src);
83 assert!(has_closing_quote, "char should have closing quote");
84 components
85 }
86
87 fn range_closed(src: &str) -> TextRange {
88 TextRange::from_to(1.into(), (src.len() as u32 - 1).into())
89 }
90
91 fn range_unclosed(src: &str) -> TextRange {
92 TextRange::from_to(1.into(), (src.len() as u32).into())
93 }
94
95 #[test]
96 fn test_unicode_escapes() {
97 let unicode_escapes = &[r"{DEAD}", "{BEEF}", "{FF}", "{}", ""];
98 for escape in unicode_escapes {
99 let escape_sequence = format!(r"'\u{}'", escape);
100 let component = closed_char_component(&escape_sequence);
101 let expected_range = range_closed(&escape_sequence);
102 assert_eq!(component.kind, UnicodeEscape);
103 assert_eq!(component.range, expected_range);
104 }
105 }
106
107 #[test]
108 fn test_unicode_escapes_unclosed() {
109 let unicode_escapes = &["{DEAD", "{BEEF", "{FF"];
110 for escape in unicode_escapes {
111 let escape_sequence = format!(r"'\u{}'", escape);
112 let component = unclosed_char_component(&escape_sequence);
113 let expected_range = range_unclosed(&escape_sequence);
114 assert_eq!(component.kind, UnicodeEscape);
115 assert_eq!(component.range, expected_range);
116 }
117 }
118
119 #[test]
120 fn test_empty_char() {
121 let (has_closing_quote, components) = parse("''");
122 assert!(has_closing_quote, "char should have closing quote");
123 assert!(components.len() == 0);
124 }
125
126 #[test]
127 fn test_unclosed_char() {
128 let component = unclosed_char_component("'a");
129 assert!(component.kind == CodePoint);
130 assert!(component.range == TextRange::from_to(1.into(), 2.into()));
131 }
132
133 #[test]
134 fn test_digit_escapes() {
135 let literals = &[r"", r"5", r"55"];
136
137 for literal in literals {
138 let lit_text = format!(r"'\x{}'", literal);
139 let component = closed_char_component(&lit_text);
140 assert!(component.kind == AsciiCodeEscape);
141 assert!(component.range == range_closed(&lit_text));
142 }
143
144 // More than 2 digits starts a new codepoint
145 let components = closed_char_components(r"'\x555'");
146 assert!(components.len() == 2);
147 assert!(components[1].kind == CodePoint);
148 }
149
150 #[test]
151 fn test_ascii_escapes() {
152 let literals = &[
153 r"\'", "\\\"", // equivalent to \"
154 r"\n", r"\r", r"\t", r"\\", r"\0",
155 ];
156
157 for literal in literals {
158 let lit_text = format!("'{}'", literal);
159 let component = closed_char_component(&lit_text);
160 assert!(component.kind == AsciiEscape);
161 assert!(component.range == range_closed(&lit_text));
162 }
163 }
164
165 #[test]
166 fn test_no_escapes() {
167 let literals = &['"', 'n', 'r', 't', '0', 'x', 'u'];
168
169 for &literal in literals {
170 let lit_text = format!("'{}'", literal);
171 let component = closed_char_component(&lit_text);
172 assert!(component.kind == CodePoint);
173 assert!(component.range == range_closed(&lit_text));
174 }
175 }
176}
diff --git a/crates/ra_syntax/src/string_lexing/mod.rs b/crates/ra_syntax/src/string_lexing/mod.rs
new file mode 100644
index 000000000..94853331f
--- /dev/null
+++ b/crates/ra_syntax/src/string_lexing/mod.rs
@@ -0,0 +1,13 @@
1mod parser;
2mod byte;
3mod byte_string;
4mod char;
5mod string;
6
7pub use self::{
8 byte::parse_byte_literal,
9 byte_string::parse_byte_string_literal,
10 char::parse_char_literal,
11 parser::{CharComponent, CharComponentKind, StringComponent, StringComponentKind},
12 string::parse_string_literal,
13};
diff --git a/crates/ra_syntax/src/string_lexing/parser.rs b/crates/ra_syntax/src/string_lexing/parser.rs
new file mode 100644
index 000000000..4a6d5bc93
--- /dev/null
+++ b/crates/ra_syntax/src/string_lexing/parser.rs
@@ -0,0 +1,201 @@
1use rowan::{TextRange, TextUnit};
2
3use self::CharComponentKind::*;
4
5pub struct Parser<'a> {
6 pub(super) src: &'a str,
7 pub(super) pos: usize,
8}
9
10impl<'a> Parser<'a> {
11 pub fn new(src: &'a str) -> Parser<'a> {
12 Parser { src, pos: 0 }
13 }
14
15 // Utility methods
16
17 pub fn peek(&self) -> Option<char> {
18 if self.pos == self.src.len() {
19 return None;
20 }
21
22 self.src[self.pos..].chars().next()
23 }
24
25 pub fn advance(&mut self) -> char {
26 let next = self
27 .peek()
28 .expect("cannot advance if end of input is reached");
29 self.pos += next.len_utf8();
30 next
31 }
32
33 pub fn skip_whitespace(&mut self) {
34 while self.peek().map(|c| c.is_whitespace()) == Some(true) {
35 self.advance();
36 }
37 }
38
39 pub fn get_pos(&self) -> TextUnit {
40 (self.pos as u32).into()
41 }
42
43 // Char parsing methods
44
45 fn parse_unicode_escape(&mut self, start: TextUnit) -> CharComponent {
46 match self.peek() {
47 Some('{') => {
48 self.advance();
49
50 // Parse anything until we reach `}`
51 while let Some(next) = self.peek() {
52 self.advance();
53 if next == '}' {
54 break;
55 }
56 }
57
58 let end = self.get_pos();
59 CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
60 }
61 Some(_) | None => {
62 let end = self.get_pos();
63 CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
64 }
65 }
66 }
67
68 fn parse_ascii_code_escape(&mut self, start: TextUnit) -> CharComponent {
69 let code_start = self.get_pos();
70 while let Some(next) = self.peek() {
71 if next == '\'' || (self.get_pos() - code_start == 2.into()) {
72 break;
73 }
74
75 self.advance();
76 }
77
78 let end = self.get_pos();
79 CharComponent::new(TextRange::from_to(start, end), AsciiCodeEscape)
80 }
81
82 fn parse_escape(&mut self, start: TextUnit) -> CharComponent {
83 if self.peek().is_none() {
84 return CharComponent::new(TextRange::from_to(start, start), AsciiEscape);
85 }
86
87 let next = self.advance();
88 let end = self.get_pos();
89 let range = TextRange::from_to(start, end);
90 match next {
91 'x' => self.parse_ascii_code_escape(start),
92 'u' => self.parse_unicode_escape(start),
93 _ => CharComponent::new(range, AsciiEscape),
94 }
95 }
96
97 pub fn parse_char_component(&mut self) -> Option<CharComponent> {
98 let next = self.peek()?;
99
100 // Ignore character close
101 if next == '\'' {
102 return None;
103 }
104
105 let start = self.get_pos();
106 self.advance();
107
108 if next == '\\' {
109 Some(self.parse_escape(start))
110 } else {
111 let end = self.get_pos();
112 Some(CharComponent::new(
113 TextRange::from_to(start, end),
114 CodePoint,
115 ))
116 }
117 }
118
119 pub fn parse_ignore_newline(&mut self, start: TextUnit) -> Option<StringComponent> {
120 // In string literals, when a `\` occurs immediately before the newline, the `\`,
121 // the newline, and all whitespace at the beginning of the next line are ignored
122 match self.peek() {
123 Some('\n') | Some('\r') => {
124 self.skip_whitespace();
125 Some(StringComponent::new(
126 TextRange::from_to(start, self.get_pos()),
127 StringComponentKind::IgnoreNewline,
128 ))
129 }
130 _ => None,
131 }
132 }
133
134 pub fn parse_string_component(&mut self) -> Option<StringComponent> {
135 let next = self.peek()?;
136
137 // Ignore string close
138 if next == '"' {
139 return None;
140 }
141
142 let start = self.get_pos();
143 self.advance();
144
145 if next == '\\' {
146 // Strings can use `\` to ignore newlines, so we first try to parse one of those
147 // before falling back to parsing char escapes
148 self.parse_ignore_newline(start).or_else(|| {
149 let char_component = self.parse_escape(start);
150 Some(StringComponent::new(
151 char_component.range,
152 StringComponentKind::Char(char_component.kind),
153 ))
154 })
155 } else {
156 let end = self.get_pos();
157 Some(StringComponent::new(
158 TextRange::from_to(start, end),
159 StringComponentKind::Char(CodePoint),
160 ))
161 }
162 }
163}
164
165#[derive(Debug, Eq, PartialEq, Clone)]
166pub struct StringComponent {
167 pub range: TextRange,
168 pub kind: StringComponentKind,
169}
170
171impl StringComponent {
172 fn new(range: TextRange, kind: StringComponentKind) -> StringComponent {
173 StringComponent { range, kind }
174 }
175}
176
177#[derive(Debug, Eq, PartialEq, Clone)]
178pub enum StringComponentKind {
179 IgnoreNewline,
180 Char(CharComponentKind),
181}
182
183#[derive(Debug, Eq, PartialEq, Clone)]
184pub struct CharComponent {
185 pub range: TextRange,
186 pub kind: CharComponentKind,
187}
188
189impl CharComponent {
190 fn new(range: TextRange, kind: CharComponentKind) -> CharComponent {
191 CharComponent { range, kind }
192 }
193}
194
195#[derive(Debug, Eq, PartialEq, Clone)]
196pub enum CharComponentKind {
197 CodePoint,
198 AsciiEscape,
199 AsciiCodeEscape,
200 UnicodeEscape,
201}
diff --git a/crates/ra_syntax/src/string_lexing/string.rs b/crates/ra_syntax/src/string_lexing/string.rs
new file mode 100644
index 000000000..1b23029c6
--- /dev/null
+++ b/crates/ra_syntax/src/string_lexing/string.rs
@@ -0,0 +1,46 @@
1use super::parser::Parser;
2use super::StringComponent;
3
4pub fn parse_string_literal(src: &str) -> StringComponentIterator {
5 StringComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11pub struct StringComponentIterator<'a> {
12 parser: Parser<'a>,
13 pub has_closing_quote: bool,
14}
15
16impl<'a> Iterator for StringComponentIterator<'a> {
17 type Item = StringComponent;
18 fn next(&mut self) -> Option<StringComponent> {
19 if self.parser.pos == 0 {
20 assert!(
21 self.parser.advance() == '"',
22 "string literal should start with double quotes"
23 );
24 }
25
26 if let Some(component) = self.parser.parse_string_component() {
27 return Some(component);
28 }
29
30 // We get here when there are no char components left to parse
31 if self.parser.peek() == Some('"') {
32 self.parser.advance();
33 self.has_closing_quote = true;
34 }
35
36 assert!(
37 self.parser.peek() == None,
38 "string literal should leave no unparsed input: src = {}, pos = {}, length = {}",
39 self.parser.src,
40 self.parser.pos,
41 self.parser.src.len()
42 );
43
44 None
45 }
46}
diff --git a/crates/ra_syntax/src/validation/byte.rs b/crates/ra_syntax/src/validation/byte.rs
new file mode 100644
index 000000000..43c0d7edd
--- /dev/null
+++ b/crates/ra_syntax/src/validation/byte.rs
@@ -0,0 +1,211 @@
1//! Validation of byte literals
2
3use crate::{
4 ast::{self, AstNode},
5 string_lexing::{self, CharComponentKind},
6 TextRange,
7 validation::char,
8 yellow::{
9 SyntaxError,
10 SyntaxErrorKind::*,
11 },
12};
13
14pub(super) fn validate_byte_node(node: ast::Byte, errors: &mut Vec<SyntaxError>) {
15 let literal_text = node.text();
16 let literal_range = node.syntax().range();
17 let mut components = string_lexing::parse_byte_literal(literal_text);
18 let mut len = 0;
19 for component in &mut components {
20 len += 1;
21 let text = &literal_text[component.range];
22 let range = component.range + literal_range.start();
23 validate_byte_component(text, component.kind, range, errors);
24 }
25
26 if !components.has_closing_quote {
27 errors.push(SyntaxError::new(UnclosedByte, literal_range));
28 }
29
30 if len == 0 {
31 errors.push(SyntaxError::new(EmptyByte, literal_range));
32 }
33
34 if len > 1 {
35 errors.push(SyntaxError::new(OverlongByte, literal_range));
36 }
37}
38
39pub(super) fn validate_byte_component(
40 text: &str,
41 kind: CharComponentKind,
42 range: TextRange,
43 errors: &mut Vec<SyntaxError>,
44) {
45 use self::CharComponentKind::*;
46 match kind {
47 AsciiEscape => validate_byte_escape(text, range, errors),
48 AsciiCodeEscape => validate_byte_code_escape(text, range, errors),
49 UnicodeEscape => errors.push(SyntaxError::new(UnicodeEscapeForbidden, range)),
50 CodePoint => {
51 let c = text
52 .chars()
53 .next()
54 .expect("Code points should be one character long");
55
56 // These bytes must always be escaped
57 if c == '\t' || c == '\r' || c == '\n' {
58 errors.push(SyntaxError::new(UnescapedByte, range));
59 }
60
61 // Only ASCII bytes are allowed
62 if c > 0x7F as char {
63 errors.push(SyntaxError::new(ByteOutOfRange, range));
64 }
65 }
66 }
67}
68
69fn validate_byte_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
70 if text.len() == 1 {
71 // Escape sequence consists only of leading `\`
72 errors.push(SyntaxError::new(EmptyByteEscape, range));
73 } else {
74 let escape_code = text.chars().skip(1).next().unwrap();
75 if !char::is_ascii_escape(escape_code) {
76 errors.push(SyntaxError::new(InvalidByteEscape, range));
77 }
78 }
79}
80
81fn validate_byte_code_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
82 // A ByteCodeEscape has 4 chars, example: `\xDD`
83 if text.len() < 4 {
84 errors.push(SyntaxError::new(TooShortByteCodeEscape, range));
85 } else {
86 assert!(
87 text.chars().count() == 4,
88 "ByteCodeEscape cannot be longer than 4 chars"
89 );
90
91 if u8::from_str_radix(&text[2..], 16).is_err() {
92 errors.push(SyntaxError::new(MalformedByteCodeEscape, range));
93 }
94 }
95}
96
97#[cfg(test)]
98mod test {
99 use crate::SourceFileNode;
100
101 fn build_file(literal: &str) -> SourceFileNode {
102 let src = format!("const C: u8 = b'{}';", literal);
103 SourceFileNode::parse(&src)
104 }
105
106 fn assert_valid_byte(literal: &str) {
107 let file = build_file(literal);
108 assert!(
109 file.errors().len() == 0,
110 "Errors for literal '{}': {:?}",
111 literal,
112 file.errors()
113 );
114 }
115
116 fn assert_invalid_byte(literal: &str) {
117 let file = build_file(literal);
118 assert!(file.errors().len() > 0);
119 }
120
121 #[test]
122 fn test_ansi_codepoints() {
123 for byte in 0..128 {
124 match byte {
125 b'\n' | b'\r' | b'\t' => assert_invalid_byte(&(byte as char).to_string()),
126 b'\'' | b'\\' => { /* Ignore character close and backslash */ }
127 _ => assert_valid_byte(&(byte as char).to_string()),
128 }
129 }
130
131 for byte in 128..=255u8 {
132 assert_invalid_byte(&(byte as char).to_string());
133 }
134 }
135
136 #[test]
137 fn test_unicode_codepoints() {
138 let invalid = ["Ƒ", "バ", "メ", "﷽"];
139 for c in &invalid {
140 assert_invalid_byte(c);
141 }
142 }
143
144 #[test]
145 fn test_unicode_multiple_codepoints() {
146 let invalid = ["नी", "👨‍👨‍"];
147 for c in &invalid {
148 assert_invalid_byte(c);
149 }
150 }
151
152 #[test]
153 fn test_valid_byte_escape() {
154 let valid = [r"\'", "\"", "\\\\", "\\\"", r"\n", r"\r", r"\t", r"\0"];
155 for c in &valid {
156 assert_valid_byte(c);
157 }
158 }
159
160 #[test]
161 fn test_invalid_byte_escape() {
162 let invalid = [r"\a", r"\?", r"\"];
163 for c in &invalid {
164 assert_invalid_byte(c);
165 }
166 }
167
168 #[test]
169 fn test_valid_byte_code_escape() {
170 let valid = [r"\x00", r"\x7F", r"\x55", r"\xF0"];
171 for c in &valid {
172 assert_valid_byte(c);
173 }
174 }
175
176 #[test]
177 fn test_invalid_byte_code_escape() {
178 let invalid = [r"\x", r"\x7"];
179 for c in &invalid {
180 assert_invalid_byte(c);
181 }
182 }
183
184 #[test]
185 fn test_invalid_unicode_escape() {
186 let well_formed = [
187 r"\u{FF}",
188 r"\u{0}",
189 r"\u{F}",
190 r"\u{10FFFF}",
191 r"\u{1_0__FF___FF_____}",
192 ];
193 for c in &well_formed {
194 assert_invalid_byte(c);
195 }
196
197 let invalid = [
198 r"\u",
199 r"\u{}",
200 r"\u{",
201 r"\u{FF",
202 r"\u{FFFFFF}",
203 r"\u{_F}",
204 r"\u{00FFFFF}",
205 r"\u{110000}",
206 ];
207 for c in &invalid {
208 assert_invalid_byte(c);
209 }
210 }
211}
diff --git a/crates/ra_syntax/src/validation/byte_string.rs b/crates/ra_syntax/src/validation/byte_string.rs
new file mode 100644
index 000000000..7b830e97c
--- /dev/null
+++ b/crates/ra_syntax/src/validation/byte_string.rs
@@ -0,0 +1,178 @@
1use crate::{
2 ast::{self, AstNode},
3 string_lexing::{self, StringComponentKind},
4 yellow::{
5 SyntaxError,
6 SyntaxErrorKind::*,
7 },
8};
9
10use super::byte;
11
12pub(crate) fn validate_byte_string_node(node: ast::ByteString, errors: &mut Vec<SyntaxError>) {
13 let literal_text = node.text();
14 let literal_range = node.syntax().range();
15 let mut components = string_lexing::parse_byte_string_literal(literal_text);
16 for component in &mut components {
17 let range = component.range + literal_range.start();
18
19 match component.kind {
20 StringComponentKind::Char(kind) => {
21 // Chars must escape \t, \n and \r codepoints, but strings don't
22 let text = &literal_text[component.range];
23 match text {
24 "\t" | "\n" | "\r" => { /* always valid */ }
25 _ => byte::validate_byte_component(text, kind, range, errors),
26 }
27 }
28 StringComponentKind::IgnoreNewline => { /* always valid */ }
29 }
30 }
31
32 if !components.has_closing_quote {
33 errors.push(SyntaxError::new(UnclosedString, literal_range));
34 }
35}
36
37#[cfg(test)]
38mod test {
39 use crate::SourceFileNode;
40
41 fn build_file(literal: &str) -> SourceFileNode {
42 let src = format!(r#"const S: &'static [u8] = b"{}";"#, literal);
43 println!("Source: {}", src);
44 SourceFileNode::parse(&src)
45 }
46
47 fn assert_valid_str(literal: &str) {
48 let file = build_file(literal);
49 assert!(
50 file.errors().len() == 0,
51 "Errors for literal '{}': {:?}",
52 literal,
53 file.errors()
54 );
55 }
56
57 fn assert_invalid_str(literal: &str) {
58 let file = build_file(literal);
59 assert!(file.errors().len() > 0);
60 }
61
62 #[test]
63 fn test_ansi_codepoints() {
64 for byte in 0..128 {
65 match byte {
66 b'\"' | b'\\' => { /* Ignore string close and backslash */ }
67 _ => assert_valid_str(&(byte as char).to_string()),
68 }
69 }
70
71 for byte in 128..=255u8 {
72 assert_invalid_str(&(byte as char).to_string());
73 }
74 }
75
76 #[test]
77 fn test_unicode_codepoints() {
78 let invalid = ["Ƒ", "バ", "メ", "﷽"];
79 for c in &invalid {
80 assert_invalid_str(c);
81 }
82 }
83
84 #[test]
85 fn test_unicode_multiple_codepoints() {
86 let invalid = ["नी", "👨‍👨‍"];
87 for c in &invalid {
88 assert_invalid_str(c);
89 }
90 }
91
92 #[test]
93 fn test_valid_ascii_escape() {
94 let valid = [r"\'", r#"\""#, r"\\", r"\n", r"\r", r"\t", r"\0", "a", "b"];
95 for c in &valid {
96 assert_valid_str(c);
97 }
98 }
99
100 #[test]
101 fn test_invalid_ascii_escape() {
102 let invalid = [r"\a", r"\?", r"\"];
103 for c in &invalid {
104 assert_invalid_str(c);
105 }
106 }
107
108 #[test]
109 fn test_valid_ascii_code_escape() {
110 let valid = [r"\x00", r"\x7F", r"\x55", r"\xF0"];
111 for c in &valid {
112 assert_valid_str(c);
113 }
114 }
115
116 #[test]
117 fn test_invalid_ascii_code_escape() {
118 let invalid = [r"\x", r"\x7"];
119 for c in &invalid {
120 assert_invalid_str(c);
121 }
122 }
123
124 #[test]
125 fn test_invalid_unicode_escape() {
126 let well_formed = [
127 r"\u{FF}",
128 r"\u{0}",
129 r"\u{F}",
130 r"\u{10FFFF}",
131 r"\u{1_0__FF___FF_____}",
132 ];
133 for c in &well_formed {
134 assert_invalid_str(c);
135 }
136
137 let invalid = [
138 r"\u",
139 r"\u{}",
140 r"\u{",
141 r"\u{FF",
142 r"\u{FFFFFF}",
143 r"\u{_F}",
144 r"\u{00FFFFF}",
145 r"\u{110000}",
146 ];
147 for c in &invalid {
148 assert_invalid_str(c);
149 }
150 }
151
152 #[test]
153 fn test_mixed_invalid() {
154 assert_invalid_str(
155 r"This is the tale of a string
156with a newline in between, some emoji (👨‍👨‍) here and there,
157unicode escapes like this: \u{1FFBB} and weird stuff like
158this ﷽",
159 );
160 }
161
162 #[test]
163 fn test_mixed_valid() {
164 assert_valid_str(
165 r"This is the tale of a string
166with a newline in between, no emoji at all,
167nor unicode escapes or weird stuff",
168 );
169 }
170
171 #[test]
172 fn test_ignore_newline() {
173 assert_valid_str(
174 "Hello \
175 World",
176 );
177 }
178}
diff --git a/crates/ra_syntax/src/validation/char.rs b/crates/ra_syntax/src/validation/char.rs
index 63f9bad24..4728c85e6 100644
--- a/crates/ra_syntax/src/validation/char.rs
+++ b/crates/ra_syntax/src/validation/char.rs
@@ -1,3 +1,5 @@
1//! Validation of char literals
2
1use std::u32; 3use std::u32;
2 4
3use arrayvec::ArrayString; 5use arrayvec::ArrayString;
@@ -12,7 +14,7 @@ use crate::{
12 }, 14 },
13}; 15};
14 16
15pub(crate) fn validate_char_node(node: ast::Char, errors: &mut Vec<SyntaxError>) { 17pub(super) fn validate_char_node(node: ast::Char, errors: &mut Vec<SyntaxError>) {
16 let literal_text = node.text(); 18 let literal_text = node.text();
17 let literal_range = node.syntax().range(); 19 let literal_range = node.syntax().range();
18 let mut components = string_lexing::parse_char_literal(literal_text); 20 let mut components = string_lexing::parse_char_literal(literal_text);
@@ -37,7 +39,7 @@ pub(crate) fn validate_char_node(node: ast::Char, errors: &mut Vec<SyntaxError>)
37 } 39 }
38} 40}
39 41
40pub(crate) fn validate_char_component( 42pub(super) fn validate_char_component(
41 text: &str, 43 text: &str,
42 kind: CharComponentKind, 44 kind: CharComponentKind,
43 range: TextRange, 45 range: TextRange,
@@ -46,109 +48,115 @@ pub(crate) fn validate_char_component(
46 // Validate escapes 48 // Validate escapes
47 use self::CharComponentKind::*; 49 use self::CharComponentKind::*;
48 match kind { 50 match kind {
49 AsciiEscape => { 51 AsciiEscape => validate_ascii_escape(text, range, errors),
50 if text.len() == 1 { 52 AsciiCodeEscape => validate_ascii_code_escape(text, range, errors),
51 // Escape sequence consists only of leading `\` 53 UnicodeEscape => validate_unicode_escape(text, range, errors),
52 errors.push(SyntaxError::new(EmptyAsciiEscape, range)); 54 CodePoint => {
53 } else { 55 // These code points must always be escaped
54 let escape_code = text.chars().skip(1).next().unwrap(); 56 if text == "\t" || text == "\r" || text == "\n" {
55 if !is_ascii_escape(escape_code) { 57 errors.push(SyntaxError::new(UnescapedCodepoint, range));
56 errors.push(SyntaxError::new(InvalidAsciiEscape, range));
57 }
58 } 58 }
59 } 59 }
60 AsciiCodeEscape => { 60 }
61 // An AsciiCodeEscape has 4 chars, example: `\xDD` 61}
62 if text.len() < 4 { 62
63 errors.push(SyntaxError::new(TooShortAsciiCodeEscape, range)); 63fn validate_ascii_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
64 } else { 64 if text.len() == 1 {
65 assert!( 65 // Escape sequence consists only of leading `\`
66 text.chars().count() == 4, 66 errors.push(SyntaxError::new(EmptyAsciiEscape, range));
67 "AsciiCodeEscape cannot be longer than 4 chars" 67 } else {
68 ); 68 let escape_code = text.chars().skip(1).next().unwrap();
69 69 if !is_ascii_escape(escape_code) {
70 match u8::from_str_radix(&text[2..], 16) { 70 errors.push(SyntaxError::new(InvalidAsciiEscape, range));
71 Ok(code) if code < 128 => { /* Escape code is valid */ }
72 Ok(_) => errors.push(SyntaxError::new(AsciiCodeEscapeOutOfRange, range)),
73 Err(_) => errors.push(SyntaxError::new(MalformedAsciiCodeEscape, range)),
74 }
75 }
76 } 71 }
77 UnicodeEscape => { 72 }
78 assert!(&text[..2] == "\\u", "UnicodeEscape always starts with \\u"); 73}
79 74
80 if text.len() == 2 { 75pub(super) fn is_ascii_escape(code: char) -> bool {
81 // No starting `{` 76 match code {
82 errors.push(SyntaxError::new(MalformedUnicodeEscape, range)); 77 '\\' | '\'' | '"' | 'n' | 'r' | 't' | '0' => true,
83 return; 78 _ => false,
84 } 79 }
80}
85 81
86 if text.len() == 3 { 82fn validate_ascii_code_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
87 // Only starting `{` 83 // An AsciiCodeEscape has 4 chars, example: `\xDD`
88 errors.push(SyntaxError::new(UnclosedUnicodeEscape, range)); 84 if text.len() < 4 {
89 return; 85 errors.push(SyntaxError::new(TooShortAsciiCodeEscape, range));
90 } 86 } else {
87 assert!(
88 text.chars().count() == 4,
89 "AsciiCodeEscape cannot be longer than 4 chars"
90 );
91 91
92 let mut code = ArrayString::<[_; 6]>::new(); 92 match u8::from_str_radix(&text[2..], 16) {
93 let mut closed = false; 93 Ok(code) if code < 128 => { /* Escape code is valid */ }
94 for c in text[3..].chars() { 94 Ok(_) => errors.push(SyntaxError::new(AsciiCodeEscapeOutOfRange, range)),
95 assert!(!closed, "no characters after escape is closed"); 95 Err(_) => errors.push(SyntaxError::new(MalformedAsciiCodeEscape, range)),
96 96 }
97 if c.is_digit(16) { 97 }
98 if code.len() == 6 { 98}
99 errors.push(SyntaxError::new(OverlongUnicodeEscape, range));
100 return;
101 }
102
103 code.push(c);
104 } else if c == '_' {
105 // Reject leading _
106 if code.len() == 0 {
107 errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
108 return;
109 }
110 } else if c == '}' {
111 closed = true;
112 } else {
113 errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
114 return;
115 }
116 }
117 99
118 if !closed { 100fn validate_unicode_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
119 errors.push(SyntaxError::new(UnclosedUnicodeEscape, range)) 101 assert!(&text[..2] == "\\u", "UnicodeEscape always starts with \\u");
120 }
121 102
122 if code.len() == 0 { 103 if text.len() == 2 {
123 errors.push(SyntaxError::new(EmptyUnicodeEcape, range)); 104 // No starting `{`
105 errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
106 return;
107 }
108
109 if text.len() == 3 {
110 // Only starting `{`
111 errors.push(SyntaxError::new(UnclosedUnicodeEscape, range));
112 return;
113 }
114
115 let mut code = ArrayString::<[_; 6]>::new();
116 let mut closed = false;
117 for c in text[3..].chars() {
118 assert!(!closed, "no characters after escape is closed");
119
120 if c.is_digit(16) {
121 if code.len() == 6 {
122 errors.push(SyntaxError::new(OverlongUnicodeEscape, range));
124 return; 123 return;
125 } 124 }
126 125
127 match u32::from_str_radix(&code, 16) { 126 code.push(c);
128 Ok(code_u32) if code_u32 > 0x10FFFF => { 127 } else if c == '_' {
129 errors.push(SyntaxError::new(UnicodeEscapeOutOfRange, range)); 128 // Reject leading _
130 } 129 if code.len() == 0 {
131 Ok(_) => { 130 errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
132 // Valid escape code 131 return;
133 }
134 Err(_) => {
135 errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
136 }
137 }
138 }
139 CodePoint => {
140 // These code points must always be escaped
141 if text == "\t" || text == "\r" {
142 errors.push(SyntaxError::new(UnescapedCodepoint, range));
143 } 132 }
133 } else if c == '}' {
134 closed = true;
135 } else {
136 errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
137 return;
144 } 138 }
145 } 139 }
146}
147 140
148fn is_ascii_escape(code: char) -> bool { 141 if !closed {
149 match code { 142 errors.push(SyntaxError::new(UnclosedUnicodeEscape, range))
150 '\\' | '\'' | '"' | 'n' | 'r' | 't' | '0' => true, 143 }
151 _ => false, 144
145 if code.len() == 0 {
146 errors.push(SyntaxError::new(EmptyUnicodeEcape, range));
147 return;
148 }
149
150 match u32::from_str_radix(&code, 16) {
151 Ok(code_u32) if code_u32 > 0x10FFFF => {
152 errors.push(SyntaxError::new(UnicodeEscapeOutOfRange, range));
153 }
154 Ok(_) => {
155 // Valid escape code
156 }
157 Err(_) => {
158 errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
159 }
152 } 160 }
153} 161}
154 162
@@ -205,9 +213,7 @@ mod test {
205 213
206 #[test] 214 #[test]
207 fn test_valid_ascii_escape() { 215 fn test_valid_ascii_escape() {
208 let valid = [ 216 let valid = [r"\'", "\"", "\\\\", "\\\"", r"\n", r"\r", r"\t", r"\0"];
209 r"\'", "\"", "\\\\", "\\\"", r"\n", r"\r", r"\t", r"\0", "a", "b",
210 ];
211 for c in &valid { 217 for c in &valid {
212 assert_valid_char(c); 218 assert_valid_char(c);
213 } 219 }
diff --git a/crates/ra_syntax/src/validation/mod.rs b/crates/ra_syntax/src/validation/mod.rs
index 2ff0bc26d..bdee8120c 100644
--- a/crates/ra_syntax/src/validation/mod.rs
+++ b/crates/ra_syntax/src/validation/mod.rs
@@ -5,6 +5,8 @@ use crate::{
5 yellow::SyntaxError, 5 yellow::SyntaxError,
6}; 6};
7 7
8mod byte;
9mod byte_string;
8mod char; 10mod char;
9mod string; 11mod string;
10 12
@@ -12,6 +14,8 @@ pub(crate) fn validate(file: &SourceFileNode) -> Vec<SyntaxError> {
12 let mut errors = Vec::new(); 14 let mut errors = Vec::new();
13 for node in file.syntax().descendants() { 15 for node in file.syntax().descendants() {
14 let _ = visitor_ctx(&mut errors) 16 let _ = visitor_ctx(&mut errors)
17 .visit::<ast::Byte, _>(self::byte::validate_byte_node)
18 .visit::<ast::ByteString, _>(self::byte_string::validate_byte_string_node)
15 .visit::<ast::Char, _>(self::char::validate_char_node) 19 .visit::<ast::Char, _>(self::char::validate_char_node)
16 .visit::<ast::String, _>(self::string::validate_string_node) 20 .visit::<ast::String, _>(self::string::validate_string_node)
17 .accept(node); 21 .accept(node);
diff --git a/crates/ra_syntax/src/yellow/syntax_error.rs b/crates/ra_syntax/src/yellow/syntax_error.rs
index cf7b1d495..c32ee650d 100644
--- a/crates/ra_syntax/src/yellow/syntax_error.rs
+++ b/crates/ra_syntax/src/yellow/syntax_error.rs
@@ -72,6 +72,16 @@ pub enum SyntaxErrorKind {
72 EmptyChar, 72 EmptyChar,
73 UnclosedChar, 73 UnclosedChar,
74 OverlongChar, 74 OverlongChar,
75 EmptyByte,
76 UnclosedByte,
77 OverlongByte,
78 ByteOutOfRange,
79 UnescapedByte,
80 EmptyByteEscape,
81 InvalidByteEscape,
82 TooShortByteCodeEscape,
83 MalformedByteCodeEscape,
84 UnicodeEscapeForbidden,
75 EmptyAsciiEscape, 85 EmptyAsciiEscape,
76 InvalidAsciiEscape, 86 InvalidAsciiEscape,
77 TooShortAsciiCodeEscape, 87 TooShortAsciiCodeEscape,
@@ -98,6 +108,19 @@ impl fmt::Display for SyntaxErrorKind {
98 EmptyChar => write!(f, "Empty char literal"), 108 EmptyChar => write!(f, "Empty char literal"),
99 UnclosedChar => write!(f, "Unclosed char literal"), 109 UnclosedChar => write!(f, "Unclosed char literal"),
100 OverlongChar => write!(f, "Char literal should be one character long"), 110 OverlongChar => write!(f, "Char literal should be one character long"),
111 EmptyByte => write!(f, "Empty byte literal"),
112 UnclosedByte => write!(f, "Unclosed byte literal"),
113 OverlongByte => write!(f, "Byte literal should be one character long"),
114 ByteOutOfRange => write!(f, "Byte should be a valid ASCII character"),
115 UnescapedByte => write!(f, "This byte should always be escaped"),
116 EmptyByteEscape => write!(f, "Empty escape sequence"),
117 InvalidByteEscape => write!(f, "Invalid escape sequence"),
118 TooShortByteCodeEscape => write!(f, "Escape sequence should have two digits"),
119 MalformedByteCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"),
120 UnicodeEscapeForbidden => write!(
121 f,
122 "Unicode escapes are not allowed in byte literals or byte strings"
123 ),
101 TooShortAsciiCodeEscape => write!(f, "Escape sequence should have two digits"), 124 TooShortAsciiCodeEscape => write!(f, "Escape sequence should have two digits"),
102 AsciiCodeEscapeOutOfRange => { 125 AsciiCodeEscapeOutOfRange => {
103 write!(f, "Escape sequence should be between \\x00 and \\x7F") 126 write!(f, "Escape sequence should be between \\x00 and \\x7F")
diff --git a/crates/tools/src/lib.rs b/crates/tools/src/lib.rs
index 8b8e9974e..3013f4e36 100644
--- a/crates/tools/src/lib.rs
+++ b/crates/tools/src/lib.rs
@@ -17,7 +17,7 @@ pub type Result<T> = ::std::result::Result<T, failure::Error>;
17pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron"; 17pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron";
18pub const SYNTAX_KINDS: &str = "crates/ra_syntax/src/syntax_kinds/generated.rs.tera"; 18pub const SYNTAX_KINDS: &str = "crates/ra_syntax/src/syntax_kinds/generated.rs.tera";
19pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs.tera"; 19pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs.tera";
20const TOOLCHAIN: &str = "beta-2018-10-30"; 20const TOOLCHAIN: &str = "beta-2018-11-24";
21 21
22#[derive(Debug)] 22#[derive(Debug)]
23pub struct Test { 23pub struct Test {
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index fe304623f..530a6f77d 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -527,8 +527,7 @@
527 "duplexer": { 527 "duplexer": {
528 "version": "0.1.1", 528 "version": "0.1.1",
529 "resolved": "http://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", 529 "resolved": "http://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz",
530 "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=", 530 "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E="
531 "dev": true
532 }, 531 },
533 "duplexify": { 532 "duplexify": {
534 "version": "3.6.0", 533 "version": "3.6.0",
@@ -587,18 +586,16 @@
587 "dev": true 586 "dev": true
588 }, 587 },
589 "event-stream": { 588 "event-stream": {
590 "version": "3.3.6", 589 "version": "3.3.4",
591 "resolved": "https://registry.npmjs.org/event-stream/-/event-stream-3.3.6.tgz", 590 "resolved": "http://registry.npmjs.org/event-stream/-/event-stream-3.3.4.tgz",
592 "integrity": "sha512-dGXNg4F/FgVzlApjzItL+7naHutA3fDqbV/zAZqDDlXTjiMnQmZKu+prImWKszeBM5UQeGvAl3u1wBiKeDh61g==", 591 "integrity": "sha1-SrTJoPWlTbkzi0w02Gv86PSzVXE=",
593 "dev": true,
594 "requires": { 592 "requires": {
595 "duplexer": "0.1.1", 593 "duplexer": "0.1.1",
596 "flatmap-stream": "0.1.0",
597 "from": "0.1.7", 594 "from": "0.1.7",
598 "map-stream": "0.0.7", 595 "map-stream": "0.1.0",
599 "pause-stream": "0.0.11", 596 "pause-stream": "0.0.11",
600 "split": "1.0.1", 597 "split": "0.3.3",
601 "stream-combiner": "0.2.2", 598 "stream-combiner": "0.0.4",
602 "through": "2.3.8" 599 "through": "2.3.8"
603 } 600 }
604 }, 601 },
@@ -704,12 +701,6 @@
704 "integrity": "sha1-Wb+1DNkF9g18OUzT2ayqtOatk04=", 701 "integrity": "sha1-Wb+1DNkF9g18OUzT2ayqtOatk04=",
705 "dev": true 702 "dev": true
706 }, 703 },
707 "flatmap-stream": {
708 "version": "0.1.0",
709 "resolved": "https://registry.npmjs.org/flatmap-stream/-/flatmap-stream-0.1.0.tgz",
710 "integrity": "sha512-Nlic4ZRYxikqnK5rj3YoxDVKGGtUjcNDUtvQ7XsdGLZmMwdUYnXf10o1zcXtzEZTBgc6GxeRpQxV/Wu3WPIIHA==",
711 "dev": true
712 },
713 "for-in": { 704 "for-in": {
714 "version": "1.0.2", 705 "version": "1.0.2",
715 "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", 706 "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz",
@@ -745,8 +736,7 @@
745 "from": { 736 "from": {
746 "version": "0.1.7", 737 "version": "0.1.7",
747 "resolved": "https://registry.npmjs.org/from/-/from-0.1.7.tgz", 738 "resolved": "https://registry.npmjs.org/from/-/from-0.1.7.tgz",
748 "integrity": "sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4=", 739 "integrity": "sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4="
749 "dev": true
750 }, 740 },
751 "fs.realpath": { 741 "fs.realpath": {
752 "version": "1.0.0", 742 "version": "1.0.0",
@@ -986,7 +976,7 @@
986 "integrity": "sha512-/9vtSk9eI9DEWCqzGieglPqmx0WUQ9pwPHyHFpKmfxqdgqGJC2l0vFMdYs54hLdDsMDEZFLDL2J4ikjc4hQ5HQ==", 976 "integrity": "sha512-/9vtSk9eI9DEWCqzGieglPqmx0WUQ9pwPHyHFpKmfxqdgqGJC2l0vFMdYs54hLdDsMDEZFLDL2J4ikjc4hQ5HQ==",
987 "dev": true, 977 "dev": true,
988 "requires": { 978 "requires": {
989 "event-stream": "3.3.6", 979 "event-stream": "3.3.4",
990 "node.extend": "1.1.6", 980 "node.extend": "1.1.6",
991 "request": "2.88.0", 981 "request": "2.88.0",
992 "through2": "2.0.3", 982 "through2": "2.0.3",
@@ -1065,7 +1055,7 @@
1065 "integrity": "sha1-wWUyBzLRks5W/ZQnH/oSMjS/KuA=", 1055 "integrity": "sha1-wWUyBzLRks5W/ZQnH/oSMjS/KuA=",
1066 "dev": true, 1056 "dev": true,
1067 "requires": { 1057 "requires": {
1068 "event-stream": "3.3.6", 1058 "event-stream": "3.3.4",
1069 "mkdirp": "0.5.1", 1059 "mkdirp": "0.5.1",
1070 "queue": "3.1.0", 1060 "queue": "3.1.0",
1071 "vinyl-fs": "2.4.4" 1061 "vinyl-fs": "2.4.4"
@@ -1077,7 +1067,7 @@
1077 "integrity": "sha512-0QfbCH2a1k2qkTLWPqTX+QO4qNsHn3kC546YhAP3/n0h+nvtyGITDuDrYBMDZeW4WnFijmkOvBWa5HshTic1tw==", 1067 "integrity": "sha512-0QfbCH2a1k2qkTLWPqTX+QO4qNsHn3kC546YhAP3/n0h+nvtyGITDuDrYBMDZeW4WnFijmkOvBWa5HshTic1tw==",
1078 "dev": true, 1068 "dev": true,
1079 "requires": { 1069 "requires": {
1080 "event-stream": "3.3.6", 1070 "event-stream": "3.3.4",
1081 "streamifier": "0.1.1", 1071 "streamifier": "0.1.1",
1082 "tar": "2.2.1", 1072 "tar": "2.2.1",
1083 "through2": "2.0.3", 1073 "through2": "2.0.3",
@@ -1115,7 +1105,7 @@
1115 "integrity": "sha1-JOQGhdwFtxSZlSRQmeBZAmO+ja0=", 1105 "integrity": "sha1-JOQGhdwFtxSZlSRQmeBZAmO+ja0=",
1116 "dev": true, 1106 "dev": true,
1117 "requires": { 1107 "requires": {
1118 "event-stream": "3.3.6", 1108 "event-stream": "3.3.4",
1119 "queue": "4.5.0", 1109 "queue": "4.5.0",
1120 "through2": "2.0.3", 1110 "through2": "2.0.3",
1121 "vinyl": "2.2.0", 1111 "vinyl": "2.2.0",
@@ -1475,10 +1465,9 @@
1475 "dev": true 1465 "dev": true
1476 }, 1466 },
1477 "map-stream": { 1467 "map-stream": {
1478 "version": "0.0.7", 1468 "version": "0.1.0",
1479 "resolved": "https://registry.npmjs.org/map-stream/-/map-stream-0.0.7.tgz", 1469 "resolved": "http://registry.npmjs.org/map-stream/-/map-stream-0.1.0.tgz",
1480 "integrity": "sha1-ih8HiW2CsQkmvTdEokIACfiJdKg=", 1470 "integrity": "sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ="
1481 "dev": true
1482 }, 1471 },
1483 "markdown-it": { 1472 "markdown-it": {
1484 "version": "8.4.2", 1473 "version": "8.4.2",
@@ -1838,7 +1827,6 @@
1838 "version": "0.0.11", 1827 "version": "0.0.11",
1839 "resolved": "http://registry.npmjs.org/pause-stream/-/pause-stream-0.0.11.tgz", 1828 "resolved": "http://registry.npmjs.org/pause-stream/-/pause-stream-0.0.11.tgz",
1840 "integrity": "sha1-/lo0sMvOErWqaitAPuLnO2AvFEU=", 1829 "integrity": "sha1-/lo0sMvOErWqaitAPuLnO2AvFEU=",
1841 "dev": true,
1842 "requires": { 1830 "requires": {
1843 "through": "2.3.8" 1831 "through": "2.3.8"
1844 } 1832 }
@@ -2093,10 +2081,9 @@
2093 } 2081 }
2094 }, 2082 },
2095 "split": { 2083 "split": {
2096 "version": "1.0.1", 2084 "version": "0.3.3",
2097 "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", 2085 "resolved": "http://registry.npmjs.org/split/-/split-0.3.3.tgz",
2098 "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", 2086 "integrity": "sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8=",
2099 "dev": true,
2100 "requires": { 2087 "requires": {
2101 "through": "2.3.8" 2088 "through": "2.3.8"
2102 } 2089 }
@@ -2131,13 +2118,11 @@
2131 "dev": true 2118 "dev": true
2132 }, 2119 },
2133 "stream-combiner": { 2120 "stream-combiner": {
2134 "version": "0.2.2", 2121 "version": "0.0.4",
2135 "resolved": "http://registry.npmjs.org/stream-combiner/-/stream-combiner-0.2.2.tgz", 2122 "resolved": "http://registry.npmjs.org/stream-combiner/-/stream-combiner-0.0.4.tgz",
2136 "integrity": "sha1-rsjLrBd7Vrb0+kec7YwZEs7lKFg=", 2123 "integrity": "sha1-TV5DPBhSYd3mI8o/RMWGvPXErRQ=",
2137 "dev": true,
2138 "requires": { 2124 "requires": {
2139 "duplexer": "0.1.1", 2125 "duplexer": "0.1.1"
2140 "through": "2.3.8"
2141 } 2126 }
2142 }, 2127 },
2143 "stream-shift": { 2128 "stream-shift": {
@@ -2221,8 +2206,7 @@
2221 "through": { 2206 "through": {
2222 "version": "2.3.8", 2207 "version": "2.3.8",
2223 "resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz", 2208 "resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz",
2224 "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", 2209 "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
2225 "dev": true
2226 }, 2210 },
2227 "through2": { 2211 "through2": {
2228 "version": "2.0.3", 2212 "version": "2.0.3",
diff --git a/editors/code/package.json b/editors/code/package.json
index ea84a1ccb..f3b049e43 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -28,6 +28,7 @@
28 "singleQuote": true 28 "singleQuote": true
29 }, 29 },
30 "dependencies": { 30 "dependencies": {
31 "event-stream": "^3.3.4",
31 "vscode-languageclient": "^5.1.1" 32 "vscode-languageclient": "^5.1.1"
32 }, 33 },
33 "devDependencies": { 34 "devDependencies": {