-rw-r--r--  Cargo.lock                                                           |  31
-rw-r--r--  crates/ra_cli/Cargo.toml                                             |   2
-rw-r--r--  crates/ra_cli/src/help.rs                                            |  72
-rw-r--r--  crates/ra_cli/src/main.rs                                            | 143
-rw-r--r--  crates/ra_hir/src/ids.rs                                             |   2
-rw-r--r--  crates/ra_hir/src/nameres/raw.rs                                     |   4
-rw-r--r--  crates/ra_mbe/Cargo.toml                                             |   4
-rw-r--r--  crates/ra_mbe/src/lib.rs                                             |   4
-rw-r--r--  crates/ra_mbe/src/mbe_expander.rs                                    | 118
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs                                   |  36
-rw-r--r--  crates/ra_mbe/src/tests.rs                                           |  18
-rw-r--r--  crates/ra_parser/src/grammar.rs                                      |   4
-rw-r--r--  crates/ra_parser/src/grammar/expressions.rs                          | 245
-rw-r--r--  crates/ra_parser/src/grammar/expressions/atom.rs                     |  25
-rw-r--r--  crates/ra_parser/src/grammar/items.rs                                |   2
-rw-r--r--  crates/ra_parser/src/grammar/items/use_item.rs                       |  24
-rw-r--r--  crates/ra_parser/src/grammar/params.rs                               |   9
-rw-r--r--  crates/ra_parser/src/grammar/paths.rs                                |   9
-rw-r--r--  crates/ra_parser/src/grammar/patterns.rs                             |  33
-rw-r--r--  crates/ra_parser/src/grammar/type_args.rs                            |  25
-rw-r--r--  crates/ra_parser/src/parser.rs                                       | 328
-rw-r--r--  crates/ra_syntax/src/tests.rs                                        |  16
-rw-r--r--  crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs   |   5
-rw-r--r--  crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.txt  | 126
-rw-r--r--  crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rs             |   5
-rw-r--r--  crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.txt            |  50
-rw-r--r--  crates/ra_tools/Cargo.toml                                           |   2
-rw-r--r--  crates/ra_tools/src/help.rs                                          |  45
-rw-r--r--  crates/ra_tools/src/main.rs                                          | 117
-rw-r--r--  docs/user/README.md                                                  |   4
-rw-r--r--  editors/code/package.json                                            |   2
-rw-r--r--  editors/code/src/commands/cargo_watch.ts                             |  21
32 files changed, 895 insertions, 636 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 9092a87d3..4e08a0bd1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -185,16 +185,6 @@ dependencies = [
185] 185]
186 186
187[[package]] 187[[package]]
188name = "clap"
189version = "2.33.0"
190source = "registry+https://github.com/rust-lang/crates.io-index"
191dependencies = [
192 "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
193 "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
194 "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
195]
196
197[[package]]
198name = "clicolors-control" 188name = "clicolors-control"
199version = "1.0.1" 189version = "1.0.1"
200source = "registry+https://github.com/rust-lang/crates.io-index" 190source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -838,6 +828,11 @@ dependencies = [
838] 828]
839 829
840[[package]] 830[[package]]
831name = "pico-args"
832version = "0.2.0"
833source = "registry+https://github.com/rust-lang/crates.io-index"
834
835[[package]]
841name = "ppv-lite86" 836name = "ppv-lite86"
842version = "0.2.5" 837version = "0.2.5"
843source = "registry+https://github.com/rust-lang/crates.io-index" 838source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -930,9 +925,9 @@ dependencies = [
930name = "ra_cli" 925name = "ra_cli"
931version = "0.1.0" 926version = "0.1.0"
932dependencies = [ 927dependencies = [
933 "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
934 "flexi_logger 0.14.3 (registry+https://github.com/rust-lang/crates.io-index)", 928 "flexi_logger 0.14.3 (registry+https://github.com/rust-lang/crates.io-index)",
935 "indicatif 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", 929 "indicatif 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
930 "pico-args 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
936 "ra_batch 0.1.0", 931 "ra_batch 0.1.0",
937 "ra_db 0.1.0", 932 "ra_db 0.1.0",
938 "ra_hir 0.1.0", 933 "ra_hir 0.1.0",
@@ -1051,6 +1046,7 @@ dependencies = [
1051 "ra_tt 0.1.0", 1046 "ra_tt 0.1.0",
1052 "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", 1047 "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
1053 "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", 1048 "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
1049 "test_utils 0.1.0",
1054] 1050]
1055 1051
1056[[package]] 1052[[package]]
@@ -1111,8 +1107,8 @@ dependencies = [
1111name = "ra_tools" 1107name = "ra_tools"
1112version = "0.1.0" 1108version = "0.1.0"
1113dependencies = [ 1109dependencies = [
1114 "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
1115 "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", 1110 "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
1111 "pico-args 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
1116 "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", 1112 "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
1117 "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", 1113 "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
1118 "ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", 1114 "ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1594,14 +1590,6 @@ version = "0.1.9"
1594source = "registry+https://github.com/rust-lang/crates.io-index" 1590source = "registry+https://github.com/rust-lang/crates.io-index"
1595 1591
1596[[package]] 1592[[package]]
1597name = "textwrap"
1598version = "0.11.0"
1599source = "registry+https://github.com/rust-lang/crates.io-index"
1600dependencies = [
1601 "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
1602]
1603
1604[[package]]
1605name = "thread_local" 1593name = "thread_local"
1606version = "0.3.6" 1594version = "0.3.6"
1607source = "registry+https://github.com/rust-lang/crates.io-index" 1595source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1795,7 +1783,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
1795"checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" 1783"checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>"
1796"checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" 1784"checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>"
1797"checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68" 1785"checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68"
1798"checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
1799"checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e" 1786"checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e"
1800"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" 1787"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
1801"checksum console 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b147390a412132d75d10dd3b7b175a69cf5fd95032f7503c7091b8831ba10242" 1788"checksum console 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b147390a412132d75d10dd3b7b175a69cf5fd95032f7503c7091b8831ba10242"
@@ -1873,6 +1860,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
1873"checksum paste-impl 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4214c9e912ef61bf42b81ba9a47e8aad1b2ffaf739ab162bf96d1e011f54e6c5" 1860"checksum paste-impl 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4214c9e912ef61bf42b81ba9a47e8aad1b2ffaf739ab162bf96d1e011f54e6c5"
1874"checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" 1861"checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
1875"checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" 1862"checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f"
1863"checksum pico-args 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2fce25154205cf4360b456fd7d48994afe20663b77e3bd3d0a353a2fccf7f22c"
1876"checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b" 1864"checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b"
1877"checksum proc-macro-hack 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e688f31d92ffd7c1ddc57a1b4e6d773c0f2a14ee437a4b0a4f5a69c80eb221c8" 1865"checksum proc-macro-hack 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e688f31d92ffd7c1ddc57a1b4e6d773c0f2a14ee437a4b0a4f5a69c80eb221c8"
1878"checksum proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "175a40b9cf564ce9bf050654633dbf339978706b8ead1a907bb970b63185dd95" 1866"checksum proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "175a40b9cf564ce9bf050654633dbf339978706b8ead1a907bb970b63185dd95"
@@ -1930,7 +1918,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
1930"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" 1918"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
1931"checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625" 1919"checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625"
1932"checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579" 1920"checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579"
1933"checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
1934"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" 1921"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
1935"checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865" 1922"checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865"
1936"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" 1923"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f"
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml
index 205dd223b..d42ac3ad4 100644
--- a/crates/ra_cli/Cargo.toml
+++ b/crates/ra_cli/Cargo.toml
@@ -6,7 +6,7 @@ authors = ["rust-analyzer developers"]
6publish = false 6publish = false
7 7
8[dependencies] 8[dependencies]
9clap = { version = "2.32.0", default-features = false } 9pico-args = "0.2.0"
10flexi_logger = "0.14.0" 10flexi_logger = "0.14.0"
11indicatif = "0.11.0" 11indicatif = "0.11.0"
12 12
diff --git a/crates/ra_cli/src/help.rs b/crates/ra_cli/src/help.rs
new file mode 100644
index 000000000..5171578f0
--- /dev/null
+++ b/crates/ra_cli/src/help.rs
@@ -0,0 +1,72 @@
1pub const GLOBAL_HELP: &str = "ra-cli
2
3USAGE:
4 ra_cli <SUBCOMMAND>
5
6FLAGS:
7 -h, --help Prints help information
8
9SUBCOMMANDS:
10 analysis-bench
11 analysis-stats
12 highlight
13 parse
14 symbols";
15
16pub const ANALYSIS_BENCH_HELP: &str = "ra-cli-analysis-bench
17
18USAGE:
19 ra_cli analysis-bench [FLAGS] [OPTIONS] [PATH]
20
21FLAGS:
22 -h, --help Prints help information
23 -v, --verbose
24
25OPTIONS:
26 --complete <PATH:LINE:COLUMN> Compute completions at this location
27 --highlight <PATH> Highlight this file
28
29ARGS:
30 <PATH> Project to analyse";
31
32pub const ANALYSIS_STATS_HELP: &str = "ra-cli-analysis-stats
33
34USAGE:
35 ra_cli analysis-stats [FLAGS] [OPTIONS] [PATH]
36
37FLAGS:
38 -h, --help Prints help information
39 --memory-usage
40 -v, --verbose
41
42OPTIONS:
43 -o <ONLY>
44
45ARGS:
46 <PATH>";
47
48pub const HIGHLIGHT_HELP: &str = "ra-cli-highlight
49
50USAGE:
51 ra_cli highlight [FLAGS]
52
53FLAGS:
54 -h, --help Prints help information
55 -r, --rainbow";
56
57pub const SYMBOLS_HELP: &str = "ra-cli-symbols
58
59USAGE:
60 ra_cli symbols [FLAGS]
61
62FLAGS:
63 -h, --help Prints help information";
64
65pub const PARSE_HELP: &str = "ra-cli-parse
66
67USAGE:
68 ra_cli parse [FLAGS]
69
70FLAGS:
71 -h, --help Prints help information
72 --no-dump";
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index de8191ca3..e6334cf56 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -1,10 +1,11 @@
1mod analysis_stats; 1mod analysis_stats;
2mod analysis_bench; 2mod analysis_bench;
3mod help;
3 4
4use std::{error::Error, io::Read}; 5use std::{error::Error, fmt::Write, io::Read};
5 6
6use clap::{App, Arg, SubCommand};
7use flexi_logger::Logger; 7use flexi_logger::Logger;
8use pico_args::Arguments;
8use ra_ide_api::{file_structure, Analysis}; 9use ra_ide_api::{file_structure, Analysis};
9use ra_prof::profile; 10use ra_prof::profile;
10use ra_syntax::{AstNode, SourceFile}; 11use ra_syntax::{AstNode, SourceFile};
@@ -13,77 +14,89 @@ type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
13 14
14fn main() -> Result<()> { 15fn main() -> Result<()> {
15 Logger::with_env().start()?; 16 Logger::with_env().start()?;
16 let matches = App::new("ra-cli") 17
17 .setting(clap::AppSettings::SubcommandRequiredElseHelp) 18 let subcommand = match std::env::args_os().nth(1) {
18 .subcommand(SubCommand::with_name("parse").arg(Arg::with_name("no-dump").long("--no-dump"))) 19 None => {
19 .subcommand(SubCommand::with_name("symbols")) 20 eprintln!("{}", help::GLOBAL_HELP);
20 .subcommand( 21 return Ok(());
21 SubCommand::with_name("highlight") 22 }
22 .arg(Arg::with_name("rainbow").short("r").long("rainbow")), 23 Some(s) => s,
23 ) 24 };
24 .subcommand( 25 let mut matches = Arguments::from_vec(std::env::args_os().skip(2).collect());
25 SubCommand::with_name("analysis-stats") 26
26 .arg(Arg::with_name("verbose").short("v").long("verbose")) 27 match &*subcommand.to_string_lossy() {
27 .arg(Arg::with_name("memory-usage").long("memory-usage")) 28 "parse" => {
28 .arg(Arg::with_name("only").short("o").takes_value(true)) 29 if matches.contains(["-h", "--help"]) {
29 .arg(Arg::with_name("path")), 30 eprintln!("{}", help::PARSE_HELP);
30 ) 31 return Ok(());
31 .subcommand( 32 }
32 SubCommand::with_name("analysis-bench") 33 let no_dump = matches.contains("--no-dump");
33 .arg(Arg::with_name("verbose").short("v").long("verbose")) 34 matches.finish().or_else(handle_extra_flags)?;
34 .arg( 35
35 Arg::with_name("highlight")
36 .long("highlight")
37 .takes_value(true)
38 .conflicts_with("complete")
39 .value_name("PATH")
40 .help("highlight this file"),
41 )
42 .arg(
43 Arg::with_name("complete")
44 .long("complete")
45 .takes_value(true)
46 .conflicts_with("highlight")
47 .value_name("PATH:LINE:COLUMN")
48 .help("compute completions at this location"),
49 )
50 .arg(Arg::with_name("path").value_name("PATH").help("project to analyze")),
51 )
52 .get_matches();
53 match matches.subcommand() {
54 ("parse", Some(matches)) => {
55 let _p = profile("parsing"); 36 let _p = profile("parsing");
56 let file = file()?; 37 let file = file()?;
57 if !matches.is_present("no-dump") { 38 if !no_dump {
58 println!("{:#?}", file.syntax()); 39 println!("{:#?}", file.syntax());
59 } 40 }
60 std::mem::forget(file); 41 std::mem::forget(file);
61 } 42 }
62 ("symbols", _) => { 43 "symbols" => {
44 if matches.contains(["-h", "--help"]) {
45 eprintln!("{}", help::SYMBOLS_HELP);
46 return Ok(());
47 }
48 matches.finish().or_else(handle_extra_flags)?;
63 let file = file()?; 49 let file = file()?;
64 for s in file_structure(&file) { 50 for s in file_structure(&file) {
65 println!("{:?}", s); 51 println!("{:?}", s);
66 } 52 }
67 } 53 }
68 ("highlight", Some(matches)) => { 54 "highlight" => {
55 if matches.contains(["-h", "--help"]) {
56 eprintln!("{}", help::HIGHLIGHT_HELP);
57 return Ok(());
58 }
59 let rainbow_opt = matches.contains(["-r", "--rainbow"]);
60 matches.finish().or_else(handle_extra_flags)?;
69 let (analysis, file_id) = Analysis::from_single_file(read_stdin()?); 61 let (analysis, file_id) = Analysis::from_single_file(read_stdin()?);
70 let html = analysis.highlight_as_html(file_id, matches.is_present("rainbow")).unwrap(); 62 let html = analysis.highlight_as_html(file_id, rainbow_opt).unwrap();
71 println!("{}", html); 63 println!("{}", html);
72 } 64 }
73 ("analysis-stats", Some(matches)) => { 65 "analysis-stats" => {
74 let verbose = matches.is_present("verbose"); 66 if matches.contains(["-h", "--help"]) {
75 let memory_usage = matches.is_present("memory-usage"); 67 eprintln!("{}", help::ANALYSIS_STATS_HELP);
76 let path = matches.value_of("path").unwrap_or(""); 68 return Ok(());
77 let only = matches.value_of("only"); 69 }
78 analysis_stats::run(verbose, memory_usage, path.as_ref(), only)?; 70 let verbose = matches.contains(["-v", "--verbose"]);
71 let memory_usage = matches.contains("--memory-usage");
72 let path: String = matches.value_from_str("--path")?.unwrap_or_default();
73 let only = matches.value_from_str(["-o", "--only"])?.map(|v: String| v.to_owned());
74 matches.finish().or_else(handle_extra_flags)?;
75 analysis_stats::run(
76 verbose,
77 memory_usage,
78 path.as_ref(),
79 only.as_ref().map(String::as_ref),
80 )?;
79 } 81 }
80 ("analysis-bench", Some(matches)) => { 82 "analysis-bench" => {
81 let verbose = matches.is_present("verbose"); 83 if matches.contains(["-h", "--help"]) {
82 let path = matches.value_of("path").unwrap_or(""); 84 eprintln!("{}", help::ANALYSIS_BENCH_HELP);
83 let op = if let Some(path) = matches.value_of("highlight") { 85 return Ok(());
86 }
87 let verbose = matches.contains(["-v", "--verbose"]);
88 let path: String = matches.value_from_str("--path")?.unwrap_or_default();
89 let highlight_path = matches.value_from_str("--highlight")?;
90 let complete_path = matches.value_from_str("--complete")?;
91 if highlight_path.is_some() && complete_path.is_some() {
92 panic!("either --highlight or --complete must be set, not both")
93 }
94 let op = if let Some(path) = highlight_path {
95 let path: String = path;
84 analysis_bench::Op::Highlight { path: path.into() } 96 analysis_bench::Op::Highlight { path: path.into() }
85 } else if let Some(path_line_col) = matches.value_of("complete") { 97 } else if let Some(path_line_col) = complete_path {
86 let (path_line, column) = rsplit_at_char(path_line_col, ':')?; 98 let path_line_col: String = path_line_col;
99 let (path_line, column) = rsplit_at_char(path_line_col.as_str(), ':')?;
87 let (path, line) = rsplit_at_char(path_line, ':')?; 100 let (path, line) = rsplit_at_char(path_line, ':')?;
88 analysis_bench::Op::Complete { 101 analysis_bench::Op::Complete {
89 path: path.into(), 102 path: path.into(),
@@ -93,13 +106,27 @@ fn main() -> Result<()> {
93 } else { 106 } else {
94 panic!("either --highlight or --complete must be set") 107 panic!("either --highlight or --complete must be set")
95 }; 108 };
109 matches.finish().or_else(handle_extra_flags)?;
96 analysis_bench::run(verbose, path.as_ref(), op)?; 110 analysis_bench::run(verbose, path.as_ref(), op)?;
97 } 111 }
98 _ => unreachable!(), 112 _ => eprintln!("{}", help::GLOBAL_HELP),
99 } 113 }
100 Ok(()) 114 Ok(())
101} 115}
102 116
117fn handle_extra_flags(e: pico_args::Error) -> Result<()> {
118 if let pico_args::Error::UnusedArgsLeft(flags) = e {
119 let mut invalid_flags = String::new();
120 for flag in flags {
121 write!(&mut invalid_flags, "{}, ", flag)?;
122 }
123 let (invalid_flags, _) = invalid_flags.split_at(invalid_flags.len() - 2);
124 Err(format!("Invalid flags: {}", invalid_flags).into())
125 } else {
126 Err(e.to_string().into())
127 }
128}
129
103fn file() -> Result<SourceFile> { 130fn file() -> Result<SourceFile> {
104 let text = read_stdin()?; 131 let text = read_stdin()?;
105 Ok(SourceFile::parse(&text).tree()) 132 Ok(SourceFile::parse(&text).tree())
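
The new main.rs above replaces clap's declarative builder with hand-rolled dispatch over pico-args 0.2: the subcommand is read straight from argv[1], each arm pulls its flags out with `contains`/`value_from_str`, and `finish()` rejects anything left over. A minimal standalone sketch of that pattern, using only the pico-args calls that appear in the patch (the `demo` binary and its flags are hypothetical):

    use pico_args::Arguments;

    // Hypothetical two-subcommand CLI mirroring the dispatch style of the new
    // ra_cli main(): match on argv[1] by hand, then parse the remaining args.
    fn main() -> Result<(), String> {
        let subcommand = match std::env::args_os().nth(1) {
            None => {
                eprintln!("usage: demo <parse|stats> [flags]");
                return Ok(());
            }
            Some(s) => s,
        };
        let mut args = Arguments::from_vec(std::env::args_os().skip(2).collect());
        match &*subcommand.to_string_lossy() {
            "parse" => {
                // Boolean flag: `contains` also removes it from the argument list.
                let no_dump = args.contains("--no-dump");
                // In pico-args 0.2, value_from_str returns Result<Option<T>, Error>.
                let path: Option<String> =
                    args.value_from_str("--path").map_err(|e| e.to_string())?;
                // Any leftover arguments are an error, as in handle_extra_flags.
                args.finish().map_err(|e| e.to_string())?;
                println!("parse: no_dump={} path={:?}", no_dump, path);
            }
            "stats" => {
                let verbose = args.contains(["-v", "--verbose"]);
                args.finish().map_err(|e| e.to_string())?;
                println!("stats: verbose={}", verbose);
            }
            _ => eprintln!("usage: demo <parse|stats> [flags]"),
        }
        Ok(())
    }
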
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index e0d0d4209..9ea4e695d 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -90,7 +90,7 @@ impl HirFileId {
90 }) 90 })
91 .ok()?; 91 .ok()?;
92 match macro_file.macro_file_kind { 92 match macro_file.macro_file_kind {
93 MacroFileKind::Items => Some(Parse::to_syntax(mbe::token_tree_to_ast_item_list(&tt))), 93 MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
94 MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax), 94 MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
95 } 95 }
96 } 96 }
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs
index c646d3d00..04b97cb90 100644
--- a/crates/ra_hir/src/nameres/raw.rs
+++ b/crates/ra_hir/src/nameres/raw.rs
@@ -76,8 +76,10 @@ impl RawItems {
76 source_map: ImportSourceMap::default(), 76 source_map: ImportSourceMap::default(),
77 }; 77 };
78 if let Some(node) = db.parse_or_expand(file_id) { 78 if let Some(node) = db.parse_or_expand(file_id) {
79 if let Some(source_file) = ast::SourceFile::cast(node) { 79 if let Some(source_file) = ast::SourceFile::cast(node.clone()) {
80 collector.process_module(None, source_file); 80 collector.process_module(None, source_file);
81 } else if let Some(item_list) = ast::MacroItems::cast(node) {
82 collector.process_module(None, item_list);
81 } 83 }
82 } 84 }
83 (Arc::new(collector.raw_items), Arc::new(collector.source_map)) 85 (Arc::new(collector.raw_items), Arc::new(collector.source_map))
diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml
index 68f559295..b058dde91 100644
--- a/crates/ra_mbe/Cargo.toml
+++ b/crates/ra_mbe/Cargo.toml
@@ -12,3 +12,7 @@ itertools = "0.8.0"
12rustc-hash = "1.0.0" 12rustc-hash = "1.0.0"
13smallvec = "0.6.9" 13smallvec = "0.6.9"
14log = "0.4.5" 14log = "0.4.5"
15
16[dev-dependencies]
17test_utils = { path = "../test_utils" }
18
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 52c3d03b5..f07f000ff 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -41,8 +41,8 @@ pub enum ExpandError {
41} 41}
42 42
43pub use crate::syntax_bridge::{ 43pub use crate::syntax_bridge::{
44 ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_ast_item_list, token_tree_to_expr, 44 ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
45 token_tree_to_macro_items, token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty, 45 token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
46}; 46};
47 47
48/// This struct contains AST for a single `macro_rules` definition. What might 48/// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index 01641fdee..78df96880 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -81,21 +81,26 @@ struct Bindings {
81 81
82#[derive(Debug)] 82#[derive(Debug)]
83enum Binding { 83enum Binding {
84 Simple(tt::TokenTree), 84 Fragment(Fragment),
85 Nested(Vec<Binding>), 85 Nested(Vec<Binding>),
86 Empty, 86 Empty,
87} 87}
88 88
89#[derive(Debug, Clone)]
90enum Fragment {
91 /// token fragments are just copy-pasted into the output
92 Tokens(tt::TokenTree),
93 /// Ast fragments are inserted with fake delimiters, so as to make things
94 /// like `$i * 2` where `$i = 1 + 1` work as expectd.
95 Ast(tt::TokenTree),
96}
97
89impl Bindings { 98impl Bindings {
90 fn push_optional(&mut self, name: &SmolStr) { 99 fn push_optional(&mut self, name: &SmolStr) {
91 // FIXME: Do we have a better way to represent an empty token ? 100 // FIXME: Do we have a better way to represent an empty token ?
92 // Insert an empty subtree for empty token 101 // Insert an empty subtree for empty token
93 self.inner.insert( 102 let tt = tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![] }.into();
94 name.clone(), 103 self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
95 Binding::Simple(
96 tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![] }.into(),
97 ),
98 );
99 } 104 }
100 105
101 fn push_empty(&mut self, name: &SmolStr) { 106 fn push_empty(&mut self, name: &SmolStr) {
@@ -106,13 +111,13 @@ impl Bindings {
106 self.inner.contains_key(name) 111 self.inner.contains_key(name)
107 } 112 }
108 113
109 fn get(&self, name: &SmolStr, nesting: &[usize]) -> Result<&tt::TokenTree, ExpandError> { 114 fn get(&self, name: &SmolStr, nesting: &[usize]) -> Result<&Fragment, ExpandError> {
110 let mut b = self.inner.get(name).ok_or_else(|| { 115 let mut b = self.inner.get(name).ok_or_else(|| {
111 ExpandError::BindingError(format!("could not find binding `{}`", name)) 116 ExpandError::BindingError(format!("could not find binding `{}`", name))
112 })?; 117 })?;
113 for &idx in nesting.iter() { 118 for &idx in nesting.iter() {
114 b = match b { 119 b = match b {
115 Binding::Simple(_) => break, 120 Binding::Fragment(_) => break,
116 Binding::Nested(bs) => bs.get(idx).ok_or_else(|| { 121 Binding::Nested(bs) => bs.get(idx).ok_or_else(|| {
117 ExpandError::BindingError(format!("could not find nested binding `{}`", name)) 122 ExpandError::BindingError(format!("could not find nested binding `{}`", name))
118 })?, 123 })?,
@@ -125,7 +130,7 @@ impl Bindings {
125 }; 130 };
126 } 131 }
127 match b { 132 match b {
128 Binding::Simple(it) => Ok(it), 133 Binding::Fragment(it) => Ok(it),
129 Binding::Nested(_) => Err(ExpandError::BindingError(format!( 134 Binding::Nested(_) => Err(ExpandError::BindingError(format!(
130 "expected simple binding, found nested binding `{}`", 135 "expected simple binding, found nested binding `{}`",
131 name 136 name
@@ -195,8 +200,8 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
195 crate::Leaf::Var(crate::Var { text, kind }) => { 200 crate::Leaf::Var(crate::Var { text, kind }) => {
196 let kind = kind.as_ref().ok_or(ExpandError::UnexpectedToken)?; 201 let kind = kind.as_ref().ok_or(ExpandError::UnexpectedToken)?;
197 match match_meta_var(kind.as_str(), input)? { 202 match match_meta_var(kind.as_str(), input)? {
198 Some(tt) => { 203 Some(fragment) => {
199 res.inner.insert(text.clone(), Binding::Simple(tt)); 204 res.inner.insert(text.clone(), Binding::Fragment(fragment));
200 } 205 }
201 None => res.push_optional(text), 206 None => res.push_optional(text),
202 } 207 }
@@ -292,7 +297,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
292 Ok(res) 297 Ok(res)
293} 298}
294 299
295fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<tt::TokenTree>, ExpandError> { 300fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<Fragment>, ExpandError> {
296 let fragment = match kind { 301 let fragment = match kind {
297 "path" => Path, 302 "path" => Path,
298 "expr" => Expr, 303 "expr" => Expr,
@@ -303,7 +308,7 @@ fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<tt::TokenTr
303 "meta" => MetaItem, 308 "meta" => MetaItem,
304 "item" => Item, 309 "item" => Item,
305 _ => { 310 _ => {
306 let binding = match kind { 311 let tt = match kind {
307 "ident" => { 312 "ident" => {
308 let ident = input.eat_ident().ok_or(ExpandError::UnexpectedToken)?.clone(); 313 let ident = input.eat_ident().ok_or(ExpandError::UnexpectedToken)?.clone();
309 tt::Leaf::from(ident).into() 314 tt::Leaf::from(ident).into()
@@ -321,11 +326,12 @@ fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<tt::TokenTr
321 }, 326 },
322 _ => return Err(ExpandError::UnexpectedToken), 327 _ => return Err(ExpandError::UnexpectedToken),
323 }; 328 };
324 return Ok(Some(binding)); 329 return Ok(Some(Fragment::Tokens(tt)));
325 } 330 }
326 }; 331 };
327 let binding = input.eat_fragment(fragment).ok_or(ExpandError::UnexpectedToken)?; 332 let tt = input.eat_fragment(fragment).ok_or(ExpandError::UnexpectedToken)?;
328 Ok(Some(binding)) 333 let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) };
334 Ok(Some(fragment))
329} 335}
330 336
331#[derive(Debug)] 337#[derive(Debug)]
@@ -339,45 +345,20 @@ fn expand_subtree(
339 template: &crate::Subtree, 345 template: &crate::Subtree,
340 ctx: &mut ExpandCtx, 346 ctx: &mut ExpandCtx,
341) -> Result<tt::Subtree, ExpandError> { 347) -> Result<tt::Subtree, ExpandError> {
342 let token_trees = template 348 let mut buf: Vec<tt::TokenTree> = Vec::new();
343 .token_trees 349 for tt in template.token_trees.iter() {
344 .iter() 350 let tt = expand_tt(tt, ctx)?;
345 .map(|it| expand_tt(it, ctx)) 351 push_fragment(&mut buf, tt);
346 .filter(|it| {
347 // Filter empty subtree
348 if let Ok(tt::TokenTree::Subtree(subtree)) = it {
349 subtree.delimiter != tt::Delimiter::None || !subtree.token_trees.is_empty()
350 } else {
351 true
352 }
353 })
354 .collect::<Result<Vec<_>, ExpandError>>()?;
355
356 Ok(tt::Subtree { token_trees, delimiter: template.delimiter })
357}
358
359/// Reduce single token subtree to single token
360/// In `tt` matcher case, all tt tokens will be braced by a Delimiter::None
361/// which makes all sort of problems.
362fn reduce_single_token(mut subtree: tt::Subtree) -> tt::TokenTree {
363 if subtree.delimiter != tt::Delimiter::None || subtree.token_trees.len() != 1 {
364 return subtree.into();
365 } 352 }
366 353
367 match subtree.token_trees.pop().unwrap() { 354 Ok(tt::Subtree { delimiter: template.delimiter, token_trees: buf })
368 tt::TokenTree::Subtree(subtree) => reduce_single_token(subtree),
369 tt::TokenTree::Leaf(token) => token.into(),
370 }
371} 355}
372 356
373fn expand_tt( 357fn expand_tt(template: &crate::TokenTree, ctx: &mut ExpandCtx) -> Result<Fragment, ExpandError> {
374 template: &crate::TokenTree,
375 ctx: &mut ExpandCtx,
376) -> Result<tt::TokenTree, ExpandError> {
377 let res: tt::TokenTree = match template { 358 let res: tt::TokenTree = match template {
378 crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, ctx)?.into(), 359 crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, ctx)?.into(),
379 crate::TokenTree::Repeat(repeat) => { 360 crate::TokenTree::Repeat(repeat) => {
380 let mut token_trees: Vec<tt::TokenTree> = Vec::new(); 361 let mut buf: Vec<tt::TokenTree> = Vec::new();
381 ctx.nesting.push(0); 362 ctx.nesting.push(0);
382 // Dirty hack to make macro-expansion terminate. 363 // Dirty hack to make macro-expansion terminate.
383 // This should be replaced by a propper macro-by-example implementation 364 // This should be replaced by a propper macro-by-example implementation
@@ -418,23 +399,23 @@ fn expand_tt(
418 399
419 let idx = ctx.nesting.pop().unwrap(); 400 let idx = ctx.nesting.pop().unwrap();
420 ctx.nesting.push(idx + 1); 401 ctx.nesting.push(idx + 1);
421 token_trees.push(reduce_single_token(t)); 402 push_subtree(&mut buf, t);
422 403
423 if let Some(ref sep) = repeat.separator { 404 if let Some(ref sep) = repeat.separator {
424 match sep { 405 match sep {
425 crate::Separator::Ident(ident) => { 406 crate::Separator::Ident(ident) => {
426 has_seps = 1; 407 has_seps = 1;
427 token_trees.push(tt::Leaf::from(ident.clone()).into()); 408 buf.push(tt::Leaf::from(ident.clone()).into());
428 } 409 }
429 crate::Separator::Literal(lit) => { 410 crate::Separator::Literal(lit) => {
430 has_seps = 1; 411 has_seps = 1;
431 token_trees.push(tt::Leaf::from(lit.clone()).into()); 412 buf.push(tt::Leaf::from(lit.clone()).into());
432 } 413 }
433 414
434 crate::Separator::Puncts(puncts) => { 415 crate::Separator::Puncts(puncts) => {
435 has_seps = puncts.len(); 416 has_seps = puncts.len();
436 for punct in puncts { 417 for punct in puncts {
437 token_trees.push(tt::Leaf::from(*punct).into()); 418 buf.push(tt::Leaf::from(*punct).into());
438 } 419 }
439 } 420 }
440 } 421 }
@@ -450,16 +431,16 @@ fn expand_tt(
450 431
451 ctx.nesting.pop().unwrap(); 432 ctx.nesting.pop().unwrap();
452 for _ in 0..has_seps { 433 for _ in 0..has_seps {
453 token_trees.pop(); 434 buf.pop();
454 } 435 }
455 436
456 if crate::RepeatKind::OneOrMore == repeat.kind && counter == 0 { 437 if crate::RepeatKind::OneOrMore == repeat.kind && counter == 0 {
457 return Err(ExpandError::UnexpectedToken); 438 return Err(ExpandError::UnexpectedToken);
458 } 439 }
459 440
460 // Check if it is a singel token subtree without any delimiter 441 // Check if it is a single token subtree without any delimiter
461 // e.g {Delimiter:None> ['>'] /Delimiter:None>} 442 // e.g {Delimiter:None> ['>'] /Delimiter:None>}
462 reduce_single_token(tt::Subtree { token_trees, delimiter: tt::Delimiter::None }) 443 tt::Subtree { delimiter: tt::Delimiter::None, token_trees: buf }.into()
463 } 444 }
464 crate::TokenTree::Leaf(leaf) => match leaf { 445 crate::TokenTree::Leaf(leaf) => match leaf {
465 crate::Leaf::Ident(ident) => { 446 crate::Leaf::Ident(ident) => {
@@ -500,20 +481,15 @@ fn expand_tt(
500 } 481 }
501 .into() 482 .into()
502 } else { 483 } else {
503 let tkn = ctx.bindings.get(&v.text, &ctx.nesting)?.clone(); 484 let fragment = ctx.bindings.get(&v.text, &ctx.nesting)?.clone();
504 ctx.var_expanded = true; 485 ctx.var_expanded = true;
505 486 return Ok(fragment);
506 if let tt::TokenTree::Subtree(subtree) = tkn {
507 reduce_single_token(subtree)
508 } else {
509 tkn
510 }
511 } 487 }
512 } 488 }
513 crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(), 489 crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(),
514 }, 490 },
515 }; 491 };
516 Ok(res) 492 Ok(Fragment::Tokens(res))
517} 493}
518 494
519#[cfg(test)] 495#[cfg(test)]
@@ -586,3 +562,17 @@ mod tests {
586 expand_rule(&rules.rules[0], &invocation_tt) 562 expand_rule(&rules.rules[0], &invocation_tt)
587 } 563 }
588} 564}
565
566fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
567 match fragment {
568 Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
569 Fragment::Tokens(tt) | Fragment::Ast(tt) => buf.push(tt),
570 }
571}
572
573fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
574 match tt.delimiter {
575 tt::Delimiter::None => buf.extend(tt.token_trees),
576 _ => buf.push(tt.into()),
577 }
578}
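
The Fragment::Tokens / Fragment::Ast split introduced above encodes the rule that an `expr` metavariable is substituted as one opaque operand, which is exactly what the doc comment's `$i * 2` with `$i = 1 + 1` case needs. The same rule can be observed with rustc's own macro_rules expansion; the snippet below is plain Rust, independent of this crate:

    // With `$i` bound to `1 + 1`, the expansion `$i * 2` must group as
    // `(1 + 1) * 2`; pasting the raw tokens would instead parse as
    // `1 + (1 * 2)`. Fragment::Ast exists so the expander keeps that grouping.
    macro_rules! double {
        ($i:expr) => {
            $i * 2
        };
    }

    fn main() {
        assert_eq!(double!(1 + 1), 4); // not 3
    }
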
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index a380b1cfd..2d035307b 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -46,31 +46,19 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
46// * TraitItems(SmallVec<[ast::TraitItem; 1]>) 46// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
47// * ImplItems(SmallVec<[ast::ImplItem; 1]>) 47// * ImplItems(SmallVec<[ast::ImplItem; 1]>)
48// * ForeignItems(SmallVec<[ast::ForeignItem; 1]> 48// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
49//
50//
51
52fn token_tree_to_syntax_node<F>(tt: &tt::Subtree, f: F) -> Result<Parse<SyntaxNode>, ExpandError>
53where
54 F: Fn(&mut dyn ra_parser::TokenSource, &mut dyn ra_parser::TreeSink),
55{
56 let tokens = [tt.clone().into()];
57 let buffer = TokenBuffer::new(&tokens);
58 let mut token_source = SubtreeTokenSource::new(&buffer);
59 let mut tree_sink = TtTreeSink::new(buffer.begin());
60 f(&mut token_source, &mut tree_sink);
61 if tree_sink.roots.len() != 1 {
62 return Err(ExpandError::ConversionError);
63 }
64 //FIXME: would be cool to report errors
65 let parse = tree_sink.inner.finish();
66 Ok(parse)
67}
68 49
69fn fragment_to_syntax_node( 50fn fragment_to_syntax_node(
70 tt: &tt::Subtree, 51 tt: &tt::Subtree,
71 fragment_kind: FragmentKind, 52 fragment_kind: FragmentKind,
72) -> Result<Parse<SyntaxNode>, ExpandError> { 53) -> Result<Parse<SyntaxNode>, ExpandError> {
73 let tokens = [tt.clone().into()]; 54 let tmp;
55 let tokens = match tt {
56 tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
57 _ => {
58 tmp = [tt.clone().into()];
59 &tmp[..]
60 }
61 };
74 let buffer = TokenBuffer::new(&tokens); 62 let buffer = TokenBuffer::new(&tokens);
75 let mut token_source = SubtreeTokenSource::new(&buffer); 63 let mut token_source = SubtreeTokenSource::new(&buffer);
76 let mut tree_sink = TtTreeSink::new(buffer.begin()); 64 let mut tree_sink = TtTreeSink::new(buffer.begin());
@@ -108,17 +96,11 @@ pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStm
108} 96}
109 97
110/// Parses the token tree (result of macro expansion) as a sequence of items 98/// Parses the token tree (result of macro expansion) as a sequence of items
111pub fn token_tree_to_macro_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> { 99pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
112 let parse = fragment_to_syntax_node(tt, Items)?; 100 let parse = fragment_to_syntax_node(tt, Items)?;
113 parse.cast().ok_or_else(|| crate::ExpandError::ConversionError) 101 parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
114} 102}
115 103
116/// Parses the token tree (result of macro expansion) as a sequence of items
117pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> Parse<ast::SourceFile> {
118 let parse = token_tree_to_syntax_node(tt, ra_parser::parse).unwrap();
119 parse.cast().unwrap()
120}
121
122impl TokenMap { 104impl TokenMap {
123 pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> { 105 pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
124 let idx = tt.0 as usize; 106 let idx = tt.0 as usize;
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 034ea639b..312fa4626 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1,4 +1,5 @@
1use ra_syntax::{ast, AstNode, NodeOrToken}; 1use ra_syntax::{ast, AstNode, NodeOrToken};
2use test_utils::assert_eq_text;
2 3
3use super::*; 4use super::*;
4 5
@@ -69,7 +70,7 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
69 70
70pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems { 71pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems {
71 let expanded = expand(rules, invocation); 72 let expanded = expand(rules, invocation);
72 token_tree_to_macro_items(&expanded).unwrap().tree() 73 token_tree_to_items(&expanded).unwrap().tree()
73} 74}
74 75
75#[allow(unused)] 76#[allow(unused)]
@@ -152,11 +153,10 @@ pub(crate) fn assert_expansion(
152 153
153 // wrap the given text to a macro call 154 // wrap the given text to a macro call
154 let expected = text_to_tokentree(&expected); 155 let expected = text_to_tokentree(&expected);
155
156 let (expanded_tree, expected_tree) = match kind { 156 let (expanded_tree, expected_tree) = match kind {
157 MacroKind::Items => { 157 MacroKind::Items => {
158 let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree(); 158 let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
159 let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree(); 159 let expected_tree = token_tree_to_items(&expected).unwrap().tree();
160 160
161 ( 161 (
162 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(), 162 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@@ -178,7 +178,7 @@ pub(crate) fn assert_expansion(
178 let expected_tree = expected_tree.replace("C_C__C", "$crate"); 178 let expected_tree = expected_tree.replace("C_C__C", "$crate");
179 assert_eq!( 179 assert_eq!(
180 expanded_tree, expected_tree, 180 expanded_tree, expected_tree,
181 "left => {}\nright => {}", 181 "\nleft:\n{}\nright:\n{}",
182 expanded_tree, expected_tree, 182 expanded_tree, expected_tree,
183 ); 183 );
184 184
@@ -410,7 +410,7 @@ fn test_expand_to_item_list() {
410 ", 410 ",
411 ); 411 );
412 let expansion = expand(&rules, "structs!(Foo, Bar);"); 412 let expansion = expand(&rules, "structs!(Foo, Bar);");
413 let tree = token_tree_to_macro_items(&expansion).unwrap().tree(); 413 let tree = token_tree_to_items(&expansion).unwrap().tree();
414 assert_eq!( 414 assert_eq!(
415 format!("{:#?}", tree.syntax()).trim(), 415 format!("{:#?}", tree.syntax()).trim(),
416 r#" 416 r#"
@@ -667,9 +667,9 @@ fn test_expr_order() {
667 } 667 }
668"#, 668"#,
669 ); 669 );
670 670 let dump = format!("{:#?}", expand_to_items(&rules, "foo! { 1 + 1 }").syntax());
671 assert_eq!( 671 assert_eq_text!(
672 format!("{:#?}", expand_to_items(&rules, "foo! { 1 + 1 }").syntax()).trim(), 672 dump.trim(),
673 r#"MACRO_ITEMS@[0; 15) 673 r#"MACRO_ITEMS@[0; 15)
674 FN_DEF@[0; 15) 674 FN_DEF@[0; 15)
675 FN_KW@[0; 2) "fn" 675 FN_KW@[0; 2) "fn"
diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs
index 4e6f2f558..0158f9b8a 100644
--- a/crates/ra_parser/src/grammar.rs
+++ b/crates/ra_parser/src/grammar.rs
@@ -210,7 +210,7 @@ fn opt_visibility(p: &mut Parser) -> bool {
210 // 210 //
211 // test crate_keyword_path 211 // test crate_keyword_path
212 // fn foo() { crate::foo(); } 212 // fn foo() { crate::foo(); }
213 T![crate] if p.nth(1) != T![::] => { 213 T![crate] if !p.nth_at(1, T![::]) => {
214 let m = p.start(); 214 let m = p.start();
215 p.bump_any(); 215 p.bump_any();
216 m.complete(p, VISIBILITY); 216 m.complete(p, VISIBILITY);
@@ -245,7 +245,7 @@ fn abi(p: &mut Parser) {
245fn opt_fn_ret_type(p: &mut Parser) -> bool { 245fn opt_fn_ret_type(p: &mut Parser) -> bool {
246 if p.at(T![->]) { 246 if p.at(T![->]) {
247 let m = p.start(); 247 let m = p.start();
248 p.bump_any(); 248 p.bump(T![->]);
249 types::type_(p); 249 types::type_(p);
250 m.complete(p, RET_TYPE); 250 m.complete(p, RET_TYPE);
251 true 251 true
diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs
index 30036eb46..1dd9a586c 100644
--- a/crates/ra_parser/src/grammar/expressions.rs
+++ b/crates/ra_parser/src/grammar/expressions.rs
@@ -14,20 +14,17 @@ const EXPR_FIRST: TokenSet = LHS_FIRST;
14 14
15pub(super) fn expr(p: &mut Parser) -> BlockLike { 15pub(super) fn expr(p: &mut Parser) -> BlockLike {
16 let r = Restrictions { forbid_structs: false, prefer_stmt: false }; 16 let r = Restrictions { forbid_structs: false, prefer_stmt: false };
17 let mut dollar_lvl = 0; 17 expr_bp(p, r, 1).1
18 expr_bp(p, r, 1, &mut dollar_lvl).1
19} 18}
20 19
21pub(super) fn expr_stmt(p: &mut Parser) -> (Option<CompletedMarker>, BlockLike) { 20pub(super) fn expr_stmt(p: &mut Parser) -> (Option<CompletedMarker>, BlockLike) {
22 let r = Restrictions { forbid_structs: false, prefer_stmt: true }; 21 let r = Restrictions { forbid_structs: false, prefer_stmt: true };
23 let mut dollar_lvl = 0; 22 expr_bp(p, r, 1)
24 expr_bp(p, r, 1, &mut dollar_lvl)
25} 23}
26 24
27fn expr_no_struct(p: &mut Parser) { 25fn expr_no_struct(p: &mut Parser) {
28 let r = Restrictions { forbid_structs: true, prefer_stmt: false }; 26 let r = Restrictions { forbid_structs: true, prefer_stmt: false };
29 let mut dollar_lvl = 0; 27 expr_bp(p, r, 1);
30 expr_bp(p, r, 1, &mut dollar_lvl);
31} 28}
32 29
33// test block 30// test block
@@ -212,72 +209,53 @@ struct Restrictions {
212 prefer_stmt: bool, 209 prefer_stmt: bool,
213} 210}
214 211
215enum Op { 212/// Binding powers of operators for a Pratt parser.
216 Simple, 213///
217 Composite(SyntaxKind, u8), 214/// See https://www.oilshell.org/blog/2016/11/03.html
218} 215#[rustfmt::skip]
219 216fn current_op(p: &Parser) -> (u8, SyntaxKind) {
220fn current_op(p: &Parser) -> (u8, Op) { 217 const NOT_AN_OP: (u8, SyntaxKind) = (0, T![@]);
221 if let Some(t) = p.current3() { 218 match p.current() {
222 match t { 219 T![|] if p.at(T![||]) => (3, T![||]),
223 (T![<], T![<], T![=]) => return (1, Op::Composite(T![<<=], 3)), 220 T![|] if p.at(T![|=]) => (1, T![|=]),
224 (T![>], T![>], T![=]) => return (1, Op::Composite(T![>>=], 3)), 221 T![|] => (6, T![|]),
225 _ => (), 222 T![>] if p.at(T![>>=]) => (1, T![>>=]),
226 } 223 T![>] if p.at(T![>>]) => (9, T![>>]),
227 } 224 T![>] if p.at(T![>=]) => (5, T![>=]),
228 225 T![>] => (5, T![>]),
229 if let Some(t) = p.current2() { 226 T![=] if p.at(T![=>]) => NOT_AN_OP,
230 match t { 227 T![=] if p.at(T![==]) => (5, T![==]),
231 (T![+], T![=]) => return (1, Op::Composite(T![+=], 2)), 228 T![=] => (1, T![=]),
232 (T![-], T![=]) => return (1, Op::Composite(T![-=], 2)), 229 T![<] if p.at(T![<=]) => (5, T![<=]),
233 (T![*], T![=]) => return (1, Op::Composite(T![*=], 2)), 230 T![<] if p.at(T![<<=]) => (1, T![<<=]),
234 (T![%], T![=]) => return (1, Op::Composite(T![%=], 2)), 231 T![<] if p.at(T![<<]) => (9, T![<<]),
235 (T![/], T![=]) => return (1, Op::Composite(T![/=], 2)), 232 T![<] => (5, T![<]),
236 (T![|], T![=]) => return (1, Op::Composite(T![|=], 2)), 233 T![+] if p.at(T![+=]) => (1, T![+=]),
237 (T![&], T![=]) => return (1, Op::Composite(T![&=], 2)), 234 T![+] => (10, T![+]),
238 (T![^], T![=]) => return (1, Op::Composite(T![^=], 2)), 235 T![^] if p.at(T![^=]) => (1, T![^=]),
239 (T![|], T![|]) => return (3, Op::Composite(T![||], 2)), 236 T![^] => (7, T![^]),
240 (T![&], T![&]) => return (4, Op::Composite(T![&&], 2)), 237 T![%] if p.at(T![%=]) => (1, T![%=]),
241 (T![<], T![=]) => return (5, Op::Composite(T![<=], 2)), 238 T![%] => (11, T![%]),
242 (T![>], T![=]) => return (5, Op::Composite(T![>=], 2)), 239 T![&] if p.at(T![&=]) => (1, T![&=]),
243 (T![<], T![<]) => return (9, Op::Composite(T![<<], 2)), 240 T![&] if p.at(T![&&]) => (4, T![&&]),
244 (T![>], T![>]) => return (9, Op::Composite(T![>>], 2)), 241 T![&] => (8, T![&]),
245 _ => (), 242 T![/] if p.at(T![/=]) => (1, T![/=]),
246 } 243 T![/] => (11, T![/]),
244 T![*] if p.at(T![*=]) => (1, T![*=]),
245 T![*] => (11, T![*]),
246 T![.] if p.at(T![..=]) => (2, T![..=]),
247 T![.] if p.at(T![..]) => (2, T![..]),
248 T![!] if p.at(T![!=]) => (5, T![!=]),
249 T![-] if p.at(T![-=]) => (1, T![-=]),
250 T![-] => (10, T![-]),
251
252 _ => NOT_AN_OP
247 } 253 }
248
249 let bp = match p.current() {
250 T![=] => 1,
251 T![..] | T![..=] => 2,
252 T![==] | T![!=] | T![<] | T![>] => 5,
253 T![|] => 6,
254 T![^] => 7,
255 T![&] => 8,
256 T![-] | T![+] => 10,
257 T![*] | T![/] | T![%] => 11,
258 _ => 0,
259 };
260 (bp, Op::Simple)
261} 254}
262 255
263// Parses expression with binding power of at least bp. 256// Parses expression with binding power of at least bp.
264fn expr_bp( 257fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) {
265 p: &mut Parser, 258 let mut lhs = match lhs(p, r) {
266 r: Restrictions,
267 mut bp: u8,
268 dollar_lvl: &mut usize,
269) -> (Option<CompletedMarker>, BlockLike) {
270 // `newly_dollar_open` is a flag indicated that dollar is just closed after lhs, e.g.
271 // `$1$ + a`
272 // We use this flag to skip handling it.
273 let mut newly_dollar_open = if p.at_l_dollar() {
274 *dollar_lvl += p.eat_l_dollars();
275 true
276 } else {
277 false
278 };
279
280 let mut lhs = match lhs(p, r, dollar_lvl) {
281 Some((lhs, blocklike)) => { 259 Some((lhs, blocklike)) => {
282 // test stmt_bin_expr_ambiguity 260 // test stmt_bin_expr_ambiguity
283 // fn foo() { 261 // fn foo() {
@@ -293,42 +271,23 @@ fn expr_bp(
293 }; 271 };
294 272
295 loop { 273 loop {
296 if *dollar_lvl > 0 && p.at_r_dollar() {
297 *dollar_lvl -= p.eat_r_dollars(*dollar_lvl);
298 if !newly_dollar_open {
299 // We "pump" bp for make it highest priority
300 bp = 255;
301 }
302 newly_dollar_open = false;
303 }
304
305 let is_range = p.at(T![..]) || p.at(T![..=]); 274 let is_range = p.at(T![..]) || p.at(T![..=]);
306 let (op_bp, op) = current_op(p); 275 let (op_bp, op) = current_op(p);
307 if op_bp < bp { 276 if op_bp < bp {
308 break; 277 break;
309 } 278 }
310 let m = lhs.precede(p); 279 let m = lhs.precede(p);
311 match op { 280 p.bump(op);
312 Op::Simple => p.bump_any(),
313 Op::Composite(kind, n) => {
314 p.bump_compound(kind, n);
315 }
316 }
317 281
318 expr_bp(p, r, op_bp + 1, dollar_lvl); 282 expr_bp(p, r, op_bp + 1);
319 lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR }); 283 lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
320 } 284 }
321 (Some(lhs), BlockLike::NotBlock) 285 (Some(lhs), BlockLike::NotBlock)
322} 286}
323 287
324const LHS_FIRST: TokenSet = 288const LHS_FIRST: TokenSet = atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOT, MINUS]);
325 atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
326 289
327fn lhs( 290fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
328 p: &mut Parser,
329 r: Restrictions,
330 dollar_lvl: &mut usize,
331) -> Option<(CompletedMarker, BlockLike)> {
332 let m; 291 let m;
333 let kind = match p.current() { 292 let kind = match p.current() {
334 // test ref_expr 293 // test ref_expr
@@ -353,17 +312,20 @@ fn lhs(
353 p.bump_any(); 312 p.bump_any();
354 PREFIX_EXPR 313 PREFIX_EXPR
355 } 314 }
356 // test full_range_expr
357 // fn foo() { xs[..]; }
358 T![..] | T![..=] => {
359 m = p.start();
360 p.bump_any();
361 if p.at_ts(EXPR_FIRST) {
362 expr_bp(p, r, 2, dollar_lvl);
363 }
364 return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
365 }
366 _ => { 315 _ => {
316 // test full_range_expr
317 // fn foo() { xs[..]; }
318 for &op in [T![..=], T![..]].iter() {
319 if p.at(op) {
320 m = p.start();
321 p.bump(op);
322 if p.at_ts(EXPR_FIRST) {
323 expr_bp(p, r, 2);
324 }
325 return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
326 }
327 }
328
367 // test expression_after_block 329 // test expression_after_block
368 // fn foo() { 330 // fn foo() {
369 // let mut p = F{x: 5}; 331 // let mut p = F{x: 5};
@@ -374,7 +336,7 @@ fn lhs(
374 return Some(postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block()))); 336 return Some(postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block())));
375 } 337 }
376 }; 338 };
377 expr_bp(p, r, 255, dollar_lvl); 339 expr_bp(p, r, 255);
378 Some((m.complete(p, kind), BlockLike::NotBlock)) 340 Some((m.complete(p, kind), BlockLike::NotBlock))
379} 341}
380 342
@@ -399,29 +361,13 @@ fn postfix_expr(
399 // } 361 // }
400 T!['('] if allow_calls => call_expr(p, lhs), 362 T!['('] if allow_calls => call_expr(p, lhs),
401 T!['['] if allow_calls => index_expr(p, lhs), 363 T!['['] if allow_calls => index_expr(p, lhs),
402 T![.] if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::]) => { 364 T![.] => match postfix_dot_expr(p, lhs) {
403 method_call_expr(p, lhs) 365 Ok(it) => it,
404 } 366 Err(it) => {
405 T![.] if p.nth(1) == AWAIT_KW => { 367 lhs = it;
406 // test await_expr 368 break;
407 // fn foo() { 369 }
408 // x.await; 370 },
409 // x.0.await;
410 // x.0().await?.hello();
411 // }
412 let m = lhs.precede(p);
413 p.bump_any();
414 p.bump_any();
415 m.complete(p, AWAIT_EXPR)
416 }
417 T![.] => field_expr(p, lhs),
418 // test postfix_range
419 // fn foo() { let x = 1..; }
420 T![..] | T![..=] if !EXPR_FIRST.contains(p.nth(1)) => {
421 let m = lhs.precede(p);
422 p.bump_any();
423 m.complete(p, RANGE_EXPR)
424 }
425 T![?] => try_expr(p, lhs), 371 T![?] => try_expr(p, lhs),
426 T![as] => cast_expr(p, lhs), 372 T![as] => cast_expr(p, lhs),
427 _ => break, 373 _ => break,
@@ -429,7 +375,46 @@ fn postfix_expr(
429 allow_calls = true; 375 allow_calls = true;
430 block_like = BlockLike::NotBlock; 376 block_like = BlockLike::NotBlock;
431 } 377 }
432 (lhs, block_like) 378 return (lhs, block_like);
379
380 fn postfix_dot_expr(
381 p: &mut Parser,
382 lhs: CompletedMarker,
383 ) -> Result<CompletedMarker, CompletedMarker> {
384 assert!(p.at(T![.]));
385 if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) {
386 return Ok(method_call_expr(p, lhs));
387 }
388
389 // test await_expr
390 // fn foo() {
391 // x.await;
392 // x.0.await;
393 // x.0().await?.hello();
394 // }
395 if p.nth(1) == T![await] {
396 let m = lhs.precede(p);
397 p.bump(T![.]);
398 p.bump(T![await]);
399 return Ok(m.complete(p, AWAIT_EXPR));
400 }
401
402 // test postfix_range
403 // fn foo() { let x = 1..; }
404 for &(op, la) in [(T![..=], 3), (T![..], 2)].iter() {
405 if p.at(op) {
406 return if EXPR_FIRST.contains(p.nth(la)) {
407 Err(lhs)
408 } else {
409 let m = lhs.precede(p);
410 p.bump(op);
411 Ok(m.complete(p, RANGE_EXPR))
412 };
413 }
414 }
415
416 Ok(field_expr(p, lhs))
417 }
433} 418}
434 419
435// test call_expr 420// test call_expr
@@ -465,7 +450,7 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
465// y.bar::<T>(1, 2,); 450// y.bar::<T>(1, 2,);
466// } 451// }
467fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { 452fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
468 assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::])); 453 assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])));
469 let m = lhs.precede(p); 454 let m = lhs.precede(p);
470 p.bump_any(); 455 p.bump_any();
471 name_ref(p); 456 name_ref(p);
@@ -567,7 +552,7 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) {
567 record_field_list(p); 552 record_field_list(p);
568 (m.complete(p, RECORD_LIT), BlockLike::NotBlock) 553 (m.complete(p, RECORD_LIT), BlockLike::NotBlock)
569 } 554 }
570 T![!] => { 555 T![!] if !p.at(T![!=]) => {
571 let block_like = items::macro_call_after_excl(p); 556 let block_like = items::macro_call_after_excl(p);
572 (m.complete(p, MACRO_CALL), block_like) 557 (m.complete(p, MACRO_CALL), block_like)
573 } 558 }
@@ -601,8 +586,8 @@ pub(crate) fn record_field_list(p: &mut Parser) {
601 } 586 }
602 m.complete(p, RECORD_FIELD); 587 m.complete(p, RECORD_FIELD);
603 } 588 }
604 T![..] => { 589 T![.] if p.at(T![..]) => {
605 p.bump_any(); 590 p.bump(T![..]);
606 expr(p); 591 expr(p);
607 } 592 }
608 T!['{'] => error_block(p, "expected a field"), 593 T!['{'] => error_block(p, "expected a field"),
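
The rewritten `current_op` above is a flat table of (binding power, operator) pairs, and `expr_bp` is the standard Pratt loop from the linked oilshell post: parse a left-hand side, then keep consuming operators whose binding power is at least the current threshold, recursing with `bp + 1` so equal-precedence operators associate to the left. The same shape, reduced to a self-contained evaluator over a made-up token type (none of the ra_parser types are used):

    // Minimal Pratt loop in the style of expr_bp: parse a primary, then keep
    // folding operators whose binding power clears `min_bp`.
    #[derive(Clone, Copy)]
    enum Tok {
        Num(i64),
        Op(char),
    }

    fn current_op(t: Option<&Tok>) -> (u8, char) {
        const NOT_AN_OP: (u8, char) = (0, '?');
        match t {
            Some(Tok::Op('+')) => (10, '+'),
            Some(Tok::Op('-')) => (10, '-'),
            Some(Tok::Op('*')) => (11, '*'),
            Some(Tok::Op('/')) => (11, '/'),
            _ => NOT_AN_OP,
        }
    }

    fn expr_bp(tokens: &[Tok], pos: &mut usize, min_bp: u8) -> i64 {
        let mut lhs = match tokens[*pos] {
            Tok::Num(n) => {
                *pos += 1;
                n
            }
            Tok::Op(_) => panic!("expected a number"),
        };
        loop {
            let (bp, op) = current_op(tokens.get(*pos));
            if bp < min_bp {
                break;
            }
            *pos += 1; // bump the operator
            let rhs = expr_bp(tokens, pos, bp + 1);
            lhs = match op {
                '+' => lhs + rhs,
                '-' => lhs - rhs,
                '*' => lhs * rhs,
                '/' => lhs / rhs,
                _ => unreachable!(),
            };
        }
        lhs
    }

    fn main() {
        use Tok::*;
        // 1 + 2 * 3 - 4  ==>  3, because `*` binds tighter than `+` and `-`.
        let toks = [Num(1), Op('+'), Num(2), Op('*'), Num(3), Op('-'), Num(4)];
        assert_eq!(expr_bp(&toks, &mut 0, 1), 3);
    }
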
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs
index cea79cf6f..6e295fbf9 100644
--- a/crates/ra_parser/src/grammar/expressions/atom.rs
+++ b/crates/ra_parser/src/grammar/expressions/atom.rs
@@ -69,6 +69,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
69 let done = match p.current() { 69 let done = match p.current() {
70 T!['('] => tuple_expr(p), 70 T!['('] => tuple_expr(p),
71 T!['['] => array_expr(p), 71 T!['['] => array_expr(p),
72 L_DOLLAR => meta_var_expr(p),
72 T![|] => lambda_expr(p), 73 T![|] => lambda_expr(p),
73 T![move] if la == T![|] => lambda_expr(p), 74 T![move] if la == T![|] => lambda_expr(p),
74 T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p), 75 T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p),
@@ -554,3 +555,27 @@ fn box_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
554 } 555 }
555 m.complete(p, BOX_EXPR) 556 m.complete(p, BOX_EXPR)
556} 557}
558
559/// Expression from `$var` macro expansion, wrapped in dollars
560fn meta_var_expr(p: &mut Parser) -> CompletedMarker {
561 assert!(p.at(L_DOLLAR));
562 let m = p.start();
563 p.bump(L_DOLLAR);
564 let (completed, _is_block) =
565 expr_bp(p, Restrictions { forbid_structs: false, prefer_stmt: false }, 1);
566
567 match (completed, p.current()) {
568 (Some(it), R_DOLLAR) => {
569 p.bump(R_DOLLAR);
570 m.abandon(p);
571 it
572 }
573 _ => {
574 while !p.at(R_DOLLAR) {
575 p.bump_any()
576 }
577 p.bump(R_DOLLAR);
578 m.complete(p, ERROR)
579 }
580 }
581}
diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs
index f27cc85ff..eff9d67e4 100644
--- a/crates/ra_parser/src/grammar/items.rs
+++ b/crates/ra_parser/src/grammar/items.rs
@@ -422,7 +422,7 @@ pub(crate) fn token_tree(p: &mut Parser) {
422 return; 422 return;
423 } 423 }
424 T![')'] | T![']'] => p.err_and_bump("unmatched brace"), 424 T![')'] | T![']'] => p.err_and_bump("unmatched brace"),
425 _ => p.bump_raw(), 425 _ => p.bump_any(),
426 } 426 }
427 } 427 }
428 p.expect(closing_paren_kind); 428 p.expect(closing_paren_kind);
diff --git a/crates/ra_parser/src/grammar/items/use_item.rs b/crates/ra_parser/src/grammar/items/use_item.rs
index 7a1693a34..f28f522b8 100644
--- a/crates/ra_parser/src/grammar/items/use_item.rs
+++ b/crates/ra_parser/src/grammar/items/use_item.rs
@@ -13,9 +13,8 @@ pub(super) fn use_item(p: &mut Parser, m: Marker) {
13/// so handles both `some::path::{inner::path}` and `inner::path` in 13/// so handles both `some::path::{inner::path}` and `inner::path` in
14/// `use some::path::{inner::path};` 14/// `use some::path::{inner::path};`
15fn use_tree(p: &mut Parser) { 15fn use_tree(p: &mut Parser) {
16 let la = p.nth(1);
17 let m = p.start(); 16 let m = p.start();
18 match (p.current(), la) { 17 match p.current() {
19 // Finish the use_tree for cases of e.g. 18 // Finish the use_tree for cases of e.g.
20 // `use some::path::{self, *};` or `use *;` 19 // `use some::path::{self, *};` or `use *;`
21 // This does not handle cases such as `use some::path::*` 20 // This does not handle cases such as `use some::path::*`
@@ -28,15 +27,15 @@ fn use_tree(p: &mut Parser) {
28 // use ::*; 27 // use ::*;
29 // use some::path::{*}; 28 // use some::path::{*};
30 // use some::path::{::*}; 29 // use some::path::{::*};
31 (T![*], _) => p.bump_any(), 30 T![*] => p.bump(T![*]),
32 (T![::], T![*]) => { 31 T![:] if p.at(T![::]) && p.nth(2) == T![*] => {
33 // Parse `use ::*;`, which imports all from the crate root in Rust 2015 32 // Parse `use ::*;`, which imports all from the crate root in Rust 2015
34 // This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`) 33 // This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`)
35 // but still parses and errors later: ('crate root in paths can only be used in start position') 34 // but still parses and errors later: ('crate root in paths can only be used in start position')
36 // FIXME: Add this error (if not out of scope) 35 // FIXME: Add this error (if not out of scope)
37 // In Rust 2018, it is always invalid (see above) 36 // In Rust 2018, it is always invalid (see above)
38 p.bump_any(); 37 p.bump(T![::]);
39 p.bump_any(); 38 p.bump(T![*]);
40 } 39 }
41 // Open a use tree list 40 // Open a use tree list
42 // Handles cases such as `use {some::path};` or `{inner::path}` in 41 // Handles cases such as `use {some::path};` or `{inner::path}` in
@@ -47,10 +46,11 @@ fn use_tree(p: &mut Parser) {
47 // use {path::from::root}; // Rust 2015 46 // use {path::from::root}; // Rust 2015
48 // use ::{some::arbritrary::path}; // Rust 2015 47 // use ::{some::arbritrary::path}; // Rust 2015
 49 // use ::{{{crate::export}}}; // Nonsensical but perfectly legal nesting 48
50 (T!['{'], _) | (T![::], T!['{']) => { 49 T!['{'] => {
51 if p.at(T![::]) { 50 use_tree_list(p);
52 p.bump_any(); 51 }
53 } 52 T![:] if p.at(T![::]) && p.nth(2) == T!['{'] => {
53 p.bump(T![::]);
54 use_tree_list(p); 54 use_tree_list(p);
55 } 55 }
56 // Parse a 'standard' path. 56 // Parse a 'standard' path.
@@ -80,8 +80,8 @@ fn use_tree(p: &mut Parser) {
80 // use Trait as _; 80 // use Trait as _;
81 opt_alias(p); 81 opt_alias(p);
82 } 82 }
83 T![::] => { 83 T![:] if p.at(T![::]) => {
84 p.bump_any(); 84 p.bump(T![::]);
85 match p.current() { 85 match p.current() {
86 T![*] => { 86 T![*] => {
87 p.bump_any(); 87 p.bump_any();
diff --git a/crates/ra_parser/src/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs
index 56e457325..5893b22fd 100644
--- a/crates/ra_parser/src/grammar/params.rs
+++ b/crates/ra_parser/src/grammar/params.rs
@@ -80,7 +80,7 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
80 match flavor { 80 match flavor {
81 Flavor::OptionalType | Flavor::Normal => { 81 Flavor::OptionalType | Flavor::Normal => {
82 patterns::pattern(p); 82 patterns::pattern(p);
83 if p.at(T![:]) || flavor.type_required() { 83 if p.at(T![:]) && !p.at(T![::]) || flavor.type_required() {
84 types::ascription(p) 84 types::ascription(p)
85 } 85 }
86 } 86 }
@@ -96,10 +96,11 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
96 // trait Foo { 96 // trait Foo {
97 // fn bar(_: u64, mut x: i32); 97 // fn bar(_: u64, mut x: i32);
98 // } 98 // }
99 if (la0 == IDENT || la0 == T![_]) && la1 == T![:] 99 if (la0 == IDENT || la0 == T![_]) && la1 == T![:] && !p.nth_at(1, T![::])
100 || la0 == T![mut] && la1 == IDENT && la2 == T![:] 100 || la0 == T![mut] && la1 == IDENT && la2 == T![:]
101 || la0 == T![&] && la1 == IDENT && la2 == T![:] 101 || la0 == T![&]
102 || la0 == T![&] && la1 == T![mut] && la2 == IDENT && la3 == T![:] 102 && (la1 == IDENT && la2 == T![:] && !p.nth_at(2, T![::])
103 || la1 == T![mut] && la2 == IDENT && la3 == T![:] && !p.nth_at(3, T![::]))
103 { 104 {
104 patterns::pattern(p); 105 patterns::pattern(p);
105 types::ascription(p); 106 types::ascription(p);
diff --git a/crates/ra_parser/src/grammar/paths.rs b/crates/ra_parser/src/grammar/paths.rs
index 345c93f55..24b65128e 100644
--- a/crates/ra_parser/src/grammar/paths.rs
+++ b/crates/ra_parser/src/grammar/paths.rs
@@ -1,7 +1,7 @@
1use super::*; 1use super::*;
2 2
3pub(super) const PATH_FIRST: TokenSet = 3pub(super) const PATH_FIRST: TokenSet =
4 token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, L_ANGLE]; 4 token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLON, L_ANGLE];
5 5
6pub(super) fn is_path_start(p: &Parser) -> bool { 6pub(super) fn is_path_start(p: &Parser) -> bool {
7 is_use_path_start(p) || p.at(T![<]) 7 is_use_path_start(p) || p.at(T![<])
@@ -9,7 +9,8 @@ pub(super) fn is_path_start(p: &Parser) -> bool {
9 9
10pub(super) fn is_use_path_start(p: &Parser) -> bool { 10pub(super) fn is_use_path_start(p: &Parser) -> bool {
11 match p.current() { 11 match p.current() {
12 IDENT | T![self] | T![super] | T![crate] | T![::] => true, 12 IDENT | T![self] | T![super] | T![crate] => true,
13 T![:] if p.at(T![::]) => true,
13 _ => false, 14 _ => false,
14 } 15 }
15} 16}
@@ -38,13 +39,13 @@ fn path(p: &mut Parser, mode: Mode) {
38 path_segment(p, mode, true); 39 path_segment(p, mode, true);
39 let mut qual = path.complete(p, PATH); 40 let mut qual = path.complete(p, PATH);
40 loop { 41 loop {
41 let use_tree = match p.nth(1) { 42 let use_tree = match p.nth(2) {
42 T![*] | T!['{'] => true, 43 T![*] | T!['{'] => true,
43 _ => false, 44 _ => false,
44 }; 45 };
45 if p.at(T![::]) && !use_tree { 46 if p.at(T![::]) && !use_tree {
46 let path = qual.precede(p); 47 let path = qual.precede(p);
47 p.bump_any(); 48 p.bump(T![::]);
48 path_segment(p, mode, false); 49 path_segment(p, mode, false);
49 let path = path.complete(p, PATH); 50 let path = path.complete(p, PATH);
50 qual = path; 51 qual = path;
diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs
index d2f4296f8..dd1d25b07 100644
--- a/crates/ra_parser/src/grammar/patterns.rs
+++ b/crates/ra_parser/src/grammar/patterns.rs
@@ -34,17 +34,20 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) {
34 // 200 .. 301=> (), 34 // 200 .. 301=> (),
35 // } 35 // }
36 // } 36 // }
37 if p.at(T![...]) || p.at(T![..=]) || p.at(T![..]) { 37 for &range_op in [T![...], T![..=], T![..]].iter() {
38 let m = lhs.precede(p); 38 if p.at(range_op) {
39 p.bump_any(); 39 let m = lhs.precede(p);
40 atom_pat(p, recovery_set); 40 p.bump(range_op);
41 m.complete(p, RANGE_PAT); 41 atom_pat(p, recovery_set);
42 m.complete(p, RANGE_PAT);
43 return;
44 }
42 } 45 }
43 // test marco_pat 46 // test marco_pat
44 // fn main() { 47 // fn main() {
45 // let m!(x) = 0; 48 // let m!(x) = 0;
46 // } 49 // }
47 else if lhs.kind() == PATH_PAT && p.at(T![!]) { 50 if lhs.kind() == PATH_PAT && p.at(T![!]) {
48 let m = lhs.precede(p); 51 let m = lhs.precede(p);
49 items::macro_call_after_excl(p); 52 items::macro_call_after_excl(p);
50 m.complete(p, MACRO_CALL); 53 m.complete(p, MACRO_CALL);
@@ -56,14 +59,16 @@ const PAT_RECOVERY_SET: TokenSet =
56 token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]; 59 token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
57 60
58fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { 61fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
59 // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro
60 // (T![x]).
61 let is_path_or_macro_pat =
62 |la1| la1 == T![::] || la1 == T!['('] || la1 == T!['{'] || la1 == T![!];
63
64 let m = match p.nth(0) { 62 let m = match p.nth(0) {
65 T![box] => box_pat(p), 63 T![box] => box_pat(p),
66 T![ref] | T![mut] | IDENT if !is_path_or_macro_pat(p.nth(1)) => bind_pat(p, true), 64 T![ref] | T![mut] => bind_pat(p, true),
65 IDENT => match p.nth(1) {
66 // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro
67 // (T![x]).
68 T!['('] | T!['{'] | T![!] => path_pat(p),
69 T![:] if p.nth_at(1, T![::]) => path_pat(p),
70 _ => bind_pat(p, true),
71 },
67 72
68 _ if paths::is_use_path_start(p) => path_pat(p), 73 _ if paths::is_use_path_start(p) => path_pat(p),
69 _ if is_literal_pat_start(p) => literal_pat(p), 74 _ if is_literal_pat_start(p) => literal_pat(p),
@@ -158,7 +163,7 @@ fn record_field_pat_list(p: &mut Parser) {
158 p.bump_any(); 163 p.bump_any();
159 while !p.at(EOF) && !p.at(T!['}']) { 164 while !p.at(EOF) && !p.at(T!['}']) {
160 match p.current() { 165 match p.current() {
161 T![..] => p.bump_any(), 166 T![.] if p.at(T![..]) => p.bump(T![..]),
162 IDENT if p.nth(1) == T![:] => record_field_pat(p), 167 IDENT if p.nth(1) == T![:] => record_field_pat(p),
163 T!['{'] => error_block(p, "expected ident"), 168 T!['{'] => error_block(p, "expected ident"),
164 T![box] => { 169 T![box] => {
@@ -237,7 +242,7 @@ fn slice_pat(p: &mut Parser) -> CompletedMarker {
237fn pat_list(p: &mut Parser, ket: SyntaxKind) { 242fn pat_list(p: &mut Parser, ket: SyntaxKind) {
238 while !p.at(EOF) && !p.at(ket) { 243 while !p.at(EOF) && !p.at(ket) {
239 match p.current() { 244 match p.current() {
240 T![..] => p.bump_any(), 245 T![.] if p.at(T![..]) => p.bump(T![..]),
241 _ => { 246 _ => {
242 if !p.at_ts(PATTERN_FIRST) { 247 if !p.at_ts(PATTERN_FIRST) {
243 p.error("expected a pattern"); 248 p.error("expected a pattern");
diff --git a/crates/ra_parser/src/grammar/type_args.rs b/crates/ra_parser/src/grammar/type_args.rs
index e100af531..edc7d4ff2 100644
--- a/crates/ra_parser/src/grammar/type_args.rs
+++ b/crates/ra_parser/src/grammar/type_args.rs
@@ -2,19 +2,16 @@ use super::*;
2 2
3pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) { 3pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) {
4 let m; 4 let m;
5 match (colon_colon_required, p.nth(0), p.nth(1)) { 5 if p.at(T![::]) && p.nth(2) == T![<] {
6 (_, T![::], T![<]) => { 6 m = p.start();
7 m = p.start(); 7 p.bump(T![::]);
8 p.bump_any(); 8 p.bump(T![<]);
9 p.bump_any(); 9 } else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] {
10 } 10 m = p.start();
11 (false, T![<], T![=]) => return, 11 p.bump(T![<]);
12 (false, T![<], _) => { 12 } else {
13 m = p.start(); 13 return;
14 p.bump_any(); 14 }
15 }
16 _ => return,
17 };
18 15
19 while !p.at(EOF) && !p.at(T![>]) { 16 while !p.at(EOF) && !p.at(T![>]) {
20 type_arg(p); 17 type_arg(p);
@@ -37,7 +34,7 @@ fn type_arg(p: &mut Parser) {
37 } 34 }
38 // test associated_type_bounds 35 // test associated_type_bounds
39 // fn print_all<T: Iterator<Item: Display>>(printables: T) {} 36 // fn print_all<T: Iterator<Item: Display>>(printables: T) {}
40 IDENT if p.nth(1) == T![:] => { 37 IDENT if p.nth(1) == T![:] && p.nth(2) != T![:] => {
41 name_ref(p); 38 name_ref(p);
42 type_params::bounds(p); 39 type_params::bounds(p);
43 m.complete(p, ASSOC_TYPE_ARG); 40 m.complete(p, ASSOC_TYPE_ARG);
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs
index d8567e84b..a2ac363fb 100644
--- a/crates/ra_parser/src/parser.rs
+++ b/crates/ra_parser/src/parser.rs
@@ -5,8 +5,8 @@ use drop_bomb::DropBomb;
5use crate::{ 5use crate::{
6 event::Event, 6 event::Event,
7 ParseError, 7 ParseError,
8 SyntaxKind::{self, EOF, ERROR, TOMBSTONE}, 8 SyntaxKind::{self, EOF, ERROR, L_DOLLAR, R_DOLLAR, TOMBSTONE},
9 Token, TokenSet, TokenSource, T, 9 TokenSet, TokenSource, T,
10}; 10};
11 11
12/// `Parser` struct provides the low-level API for 12/// `Parser` struct provides the low-level API for
@@ -40,38 +40,6 @@ impl<'t> Parser<'t> {
40 self.nth(0) 40 self.nth(0)
41 } 41 }
42 42
43 /// Returns the kinds of the current two tokens, if they are not separated
44 /// by trivia.
45 ///
46 /// Useful for parsing things like `>>`.
47 pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
48 let c1 = self.nth(0);
49 let c2 = self.nth(1);
50
51 if self.token_source.current().is_jointed_to_next {
52 Some((c1, c2))
53 } else {
54 None
55 }
56 }
57
58 /// Returns the kinds of the current three tokens, if they are not separated
59 /// by trivia.
60 ///
61 /// Useful for parsing things like `=>>`.
62 pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
63 let c1 = self.nth(0);
64 let c2 = self.nth(1);
65 let c3 = self.nth(2);
66 if self.token_source.current().is_jointed_to_next
67 && self.token_source.lookahead_nth(1).is_jointed_to_next
68 {
69 Some((c1, c2, c3))
70 } else {
71 None
72 }
73 }
74
75 /// Lookahead operation: returns the kind of the next nth 43 /// Lookahead operation: returns the kind of the next nth
76 /// token. 44 /// token.
77 pub(crate) fn nth(&self, n: usize) -> SyntaxKind { 45 pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
@@ -81,33 +49,93 @@ impl<'t> Parser<'t> {
81 assert!(steps <= 10_000_000, "the parser seems stuck"); 49 assert!(steps <= 10_000_000, "the parser seems stuck");
82 self.steps.set(steps + 1); 50 self.steps.set(steps + 1);
83 51
 84 // This is because dollar tokens may appear between positions;
 85 // the following code skips through them
86 let mut non_dollars_count = 0;
87 let mut i = 0;
88
89 loop {
90 let token = self.token_source.lookahead_nth(i);
91 let mut kind = token.kind;
92 if let Some((composited, step)) = self.is_composite(token, i) {
93 kind = composited;
94 i += step;
95 } else {
96 i += 1;
97 }
98
99 match kind {
100 EOF => return EOF,
101 SyntaxKind::L_DOLLAR | SyntaxKind::R_DOLLAR => {}
102 _ if non_dollars_count == n => return kind,
103 _ => non_dollars_count += 1,
104 }
105 }
106 } 53 }
107 54
108 /// Checks if the current token is `kind`. 55 /// Checks if the current token is `kind`.
109 pub(crate) fn at(&self, kind: SyntaxKind) -> bool { 56 pub(crate) fn at(&self, kind: SyntaxKind) -> bool {
110 self.current() == kind 57 self.nth_at(0, kind)
58 }
59
60 pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
61 match kind {
62 T![-=] => self.at_composite2(n, T![-], T![=]),
63 T![->] => self.at_composite2(n, T![-], T![>]),
64 T![::] => self.at_composite2(n, T![:], T![:]),
65 T![!=] => self.at_composite2(n, T![!], T![=]),
66 T![..] => self.at_composite2(n, T![.], T![.]),
67 T![*=] => self.at_composite2(n, T![*], T![=]),
68 T![/=] => self.at_composite2(n, T![/], T![=]),
69 T![&&] => self.at_composite2(n, T![&], T![&]),
70 T![&=] => self.at_composite2(n, T![&], T![=]),
71 T![%=] => self.at_composite2(n, T![%], T![=]),
72 T![^=] => self.at_composite2(n, T![^], T![=]),
73 T![+=] => self.at_composite2(n, T![+], T![=]),
74 T![<<] => self.at_composite2(n, T![<], T![<]),
75 T![<=] => self.at_composite2(n, T![<], T![=]),
76 T![==] => self.at_composite2(n, T![=], T![=]),
77 T![=>] => self.at_composite2(n, T![=], T![>]),
78 T![>=] => self.at_composite2(n, T![>], T![=]),
79 T![>>] => self.at_composite2(n, T![>], T![>]),
80 T![|=] => self.at_composite2(n, T![|], T![=]),
81 T![||] => self.at_composite2(n, T![|], T![|]),
82
83 T![...] => self.at_composite3(n, T![.], T![.], T![.]),
84 T![..=] => self.at_composite3(n, T![.], T![.], T![=]),
85 T![<<=] => self.at_composite3(n, T![<], T![<], T![=]),
86 T![>>=] => self.at_composite3(n, T![>], T![>], T![=]),
87
88 _ => self.token_source.lookahead_nth(n).kind == kind,
89 }
90 }
91
92 /// Consume the next token if `kind` matches.
93 pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
94 if !self.at(kind) {
95 return false;
96 }
97 let n_raw_tokens = match kind {
98 T![-=]
99 | T![->]
100 | T![::]
101 | T![!=]
102 | T![..]
103 | T![*=]
104 | T![/=]
105 | T![&&]
106 | T![&=]
107 | T![%=]
108 | T![^=]
109 | T![+=]
110 | T![<<]
111 | T![<=]
112 | T![==]
113 | T![=>]
114 | T![>=]
115 | T![>>]
116 | T![|=]
117 | T![||] => 2,
118
119 T![...] | T![..=] | T![<<=] | T![>>=] => 3,
120 _ => 1,
121 };
122 self.do_bump(kind, n_raw_tokens);
123 true
124 }
125
126 fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool {
127 let t1 = self.token_source.lookahead_nth(n + 0);
128 let t2 = self.token_source.lookahead_nth(n + 1);
129 t1.kind == k1 && t1.is_jointed_to_next && t2.kind == k2
130 }
131
132 fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool {
133 let t1 = self.token_source.lookahead_nth(n + 0);
134 let t2 = self.token_source.lookahead_nth(n + 1);
135 let t3 = self.token_source.lookahead_nth(n + 2);
136 (t1.kind == k1 && t1.is_jointed_to_next)
137 && (t2.kind == k2 && t2.is_jointed_to_next)
138 && t3.kind == k3
111 } 139 }
112 140
113 /// Checks if the current token is in `kinds`. 141 /// Checks if the current token is in `kinds`.
@@ -129,22 +157,9 @@ impl<'t> Parser<'t> {
129 Marker::new(pos) 157 Marker::new(pos)
130 } 158 }
131 159
 132 /// Advances the parser by one token unconditionally 160 /// Consume the next token, asserting that it is exactly `kind`.
133 /// Mainly use in `token_tree` parsing 161 pub(crate) fn bump(&mut self, kind: SyntaxKind) {
134 pub(crate) fn bump_raw(&mut self) { 162 assert!(self.eat(kind));
135 let mut kind = self.token_source.current().kind;
136
137 // Skip dollars, do_bump will eat these later
138 let mut i = 0;
139 while kind == SyntaxKind::L_DOLLAR || kind == SyntaxKind::R_DOLLAR {
140 kind = self.token_source.lookahead_nth(i).kind;
141 i += 1;
142 }
143
144 if kind == EOF {
145 return;
146 }
147 self.do_bump(kind, 1);
148 } 163 }
149 164
150 /// Advances the parser by one token with composite puncts handled 165 /// Advances the parser by one token with composite puncts handled
@@ -153,27 +168,7 @@ impl<'t> Parser<'t> {
153 if kind == EOF { 168 if kind == EOF {
154 return; 169 return;
155 } 170 }
156 171 self.do_bump(kind, 1)
157 use SyntaxKind::*;
158
159 // Handle parser composites
160 match kind {
161 T![...] | T![..=] => {
162 self.bump_compound(kind, 3);
163 }
164 T![..] | T![::] | T![==] | T![=>] | T![!=] | T![->] => {
165 self.bump_compound(kind, 2);
166 }
167 _ => {
168 self.do_bump(kind, 1);
169 }
170 }
171 }
172
173 /// Advances the parser by one token, asserting that it is exactly the expected token
174 pub(crate) fn bump(&mut self, expected: SyntaxKind) {
175 debug_assert!(self.nth(0) == expected);
176 self.bump_any()
177 } 172 }
178 173
179 /// Advances the parser by one token, remapping its kind. 174 /// Advances the parser by one token, remapping its kind.
@@ -190,13 +185,6 @@ impl<'t> Parser<'t> {
190 self.do_bump(kind, 1); 185 self.do_bump(kind, 1);
191 } 186 }
192 187
193 /// Advances the parser by `n` tokens, remapping its kind.
194 /// This is useful to create compound tokens from parts. For
195 /// example, an `<<` token is two consecutive remapped `<` tokens
196 pub(crate) fn bump_compound(&mut self, kind: SyntaxKind, n: u8) {
197 self.do_bump(kind, n);
198 }
199
200 /// Emit error with the `message` 188 /// Emit error with the `message`
201 /// FIXME: this should be much more fancy and support 189 /// FIXME: this should be much more fancy and support
202 /// structured errors with spans and notes, like rustc 190 /// structured errors with spans and notes, like rustc
@@ -206,15 +194,6 @@ impl<'t> Parser<'t> {
206 self.push_event(Event::Error { msg }) 194 self.push_event(Event::Error { msg })
207 } 195 }
208 196
209 /// Consume the next token if `kind` matches.
210 pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
211 if !self.at(kind) {
212 return false;
213 }
214 self.bump_any();
215 true
216 }
217
218 /// Consume the next token if it is `kind` or emit an error 197 /// Consume the next token if it is `kind` or emit an error
219 /// otherwise. 198 /// otherwise.
220 pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool { 199 pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool {
@@ -232,19 +211,26 @@ impl<'t> Parser<'t> {
232 211
233 /// Create an error node and consume the next token. 212 /// Create an error node and consume the next token.
234 pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { 213 pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
235 if self.at(T!['{']) || self.at(T!['}']) || self.at_ts(recovery) { 214 match self.current() {
236 self.error(message); 215 T!['{'] | T!['}'] | L_DOLLAR | R_DOLLAR => {
237 } else { 216 self.error(message);
238 let m = self.start(); 217 return;
218 }
219 _ => (),
220 }
221
222 if self.at_ts(recovery) {
239 self.error(message); 223 self.error(message);
240 self.bump_any(); 224 return;
241 m.complete(self, ERROR); 225 }
242 }; 226
227 let m = self.start();
228 self.error(message);
229 self.bump_any();
230 m.complete(self, ERROR);
243 } 231 }
244 232
245 fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { 233 fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
246 self.eat_dollars();
247
248 for _ in 0..n_raw_tokens { 234 for _ in 0..n_raw_tokens {
249 self.token_source.bump(); 235 self.token_source.bump();
250 } 236 }
@@ -255,110 +241,6 @@ impl<'t> Parser<'t> {
255 fn push_event(&mut self, event: Event) { 241 fn push_event(&mut self, event: Event) {
256 self.events.push(event) 242 self.events.push(event)
257 } 243 }
258
259 /// helper function for check if it is composite.
260 fn is_composite(&self, first: Token, n: usize) -> Option<(SyntaxKind, usize)> {
 261 // We assume the dollars will not occur between
 262 // multi-byte tokens
263
264 let jn1 = first.is_jointed_to_next;
265 if !jn1 && first.kind != T![-] {
266 return None;
267 }
268
269 let second = self.token_source.lookahead_nth(n + 1);
270 if first.kind == T![-] && second.kind == T![>] {
271 return Some((T![->], 2));
272 }
273 if !jn1 {
274 return None;
275 }
276
277 match (first.kind, second.kind) {
278 (T![:], T![:]) => return Some((T![::], 2)),
279 (T![=], T![=]) => return Some((T![==], 2)),
280 (T![=], T![>]) => return Some((T![=>], 2)),
281 (T![!], T![=]) => return Some((T![!=], 2)),
282 _ => {}
283 }
284
285 if first.kind != T![.] || second.kind != T![.] {
286 return None;
287 }
288
289 let third = self.token_source.lookahead_nth(n + 2);
290
291 let jn2 = second.is_jointed_to_next;
292 let la3 = third.kind;
293
294 if jn2 && la3 == T![.] {
295 return Some((T![...], 3));
296 }
297 if la3 == T![=] {
298 return Some((T![..=], 3));
299 }
300 return Some((T![..], 2));
301 }
302
303 fn eat_dollars(&mut self) {
304 loop {
305 match self.token_source.current().kind {
306 k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => {
307 self.token_source.bump();
308 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
309 }
310 _ => {
311 return;
312 }
313 }
314 }
315 }
316
317 pub(crate) fn eat_l_dollars(&mut self) -> usize {
318 let mut ate_count = 0;
319 loop {
320 match self.token_source.current().kind {
321 k @ SyntaxKind::L_DOLLAR => {
322 self.token_source.bump();
323 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
324 ate_count += 1;
325 }
326 _ => {
327 return ate_count;
328 }
329 }
330 }
331 }
332
333 pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize {
334 let mut ate_count = 0;
335 loop {
336 match self.token_source.current().kind {
337 k @ SyntaxKind::R_DOLLAR => {
338 self.token_source.bump();
339 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
340 ate_count += 1;
341
342 if max_count >= ate_count {
343 return ate_count;
344 }
345 }
346 _ => {
347 return ate_count;
348 }
349 }
350 }
351 }
352
353 pub(crate) fn at_l_dollar(&self) -> bool {
354 let kind = self.token_source.current().kind;
355 (kind == SyntaxKind::L_DOLLAR)
356 }
357
358 pub(crate) fn at_r_dollar(&self) -> bool {
359 let kind = self.token_source.current().kind;
360 (kind == SyntaxKind::R_DOLLAR)
361 }
362} 244}
363 245
364/// See `Parser::start`. 246/// See `Parser::start`.
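
The heart of the change above is that multi-character punctuation (`::`, `..`, `=>`, ...) is no longer pre-glued into single tokens; `nth_at`/`at_composite2` instead check, on demand, that the underlying single-character tokens are adjacent with no trivia between them. A minimal sketch of that check, assuming a `RawToken` with the same `kind`/`is_jointed_to_next` shape used in the diff:

#[derive(PartialEq)]
enum Kind { Colon, Other }

// Hypothetical mirror of the parser's token view: a kind plus a flag that
// says whether the next token follows with no whitespace or comment between.
struct RawToken { kind: Kind, is_jointed_to_next: bool }

// `::` is recognized only when two `:` tokens are jointed, so `: :` written
// with a space in the source is not treated as the path separator.
fn at_colon_colon(tokens: &[RawToken], n: usize) -> bool {
    match (tokens.get(n), tokens.get(n + 1)) {
        (Some(t1), Some(t2)) => {
            t1.kind == Kind::Colon && t1.is_jointed_to_next && t2.kind == Kind::Colon
        }
        _ => false,
    }
}
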
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs
index fa5d2d5d8..458920607 100644
--- a/crates/ra_syntax/src/tests.rs
+++ b/crates/ra_syntax/src/tests.rs
@@ -16,6 +16,18 @@ fn lexer_tests() {
16} 16}
17 17
18#[test] 18#[test]
19fn parse_smoke_test() {
20 let code = r##"
21fn main() {
22 println!("Hello, world!")
23}
24 "##;
25
26 let parse = SourceFile::parse(code);
27 assert!(parse.ok().is_ok());
28}
29
30#[test]
19fn parser_tests() { 31fn parser_tests() {
20 dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { 32 dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
21 let parse = SourceFile::parse(text); 33 let parse = SourceFile::parse(text);
@@ -75,7 +87,9 @@ fn self_hosting_parsing() {
75 { 87 {
76 count += 1; 88 count += 1;
77 let text = read_text(entry.path()); 89 let text = read_text(entry.path());
78 SourceFile::parse(&text).ok().expect("There should be no errors in the file"); 90 if let Err(errors) = SourceFile::parse(&text).ok() {
91 panic!("Parsing errors:\n{:?}\n{}\n", errors, entry.path().display());
92 }
79 } 93 }
80 assert!( 94 assert!(
81 count > 30, 95 count > 30,
diff --git a/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs b/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs
new file mode 100644
index 000000000..0d3f5722a
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.rs
@@ -0,0 +1,5 @@
1fn a() -> Foo<bar::Baz> {}
2
3fn b(_: impl FnMut(x::Y)) {}
4
5fn c(_: impl FnMut(&x::Y)) {}
diff --git a/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.txt b/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.txt
new file mode 100644
index 000000000..7e1af254c
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/ok/0054_qual_path_in_type_arg.txt
@@ -0,0 +1,126 @@
1SOURCE_FILE@[0; 88)
2 FN_DEF@[0; 26)
3 FN_KW@[0; 2) "fn"
4 WHITESPACE@[2; 3) " "
5 NAME@[3; 4)
6 IDENT@[3; 4) "a"
7 PARAM_LIST@[4; 6)
8 L_PAREN@[4; 5) "("
9 R_PAREN@[5; 6) ")"
10 WHITESPACE@[6; 7) " "
11 RET_TYPE@[7; 23)
12 THIN_ARROW@[7; 9) "->"
13 WHITESPACE@[9; 10) " "
14 PATH_TYPE@[10; 23)
15 PATH@[10; 23)
16 PATH_SEGMENT@[10; 23)
17 NAME_REF@[10; 13)
18 IDENT@[10; 13) "Foo"
19 TYPE_ARG_LIST@[13; 23)
20 L_ANGLE@[13; 14) "<"
21 TYPE_ARG@[14; 22)
22 PATH_TYPE@[14; 22)
23 PATH@[14; 22)
24 PATH@[14; 17)
25 PATH_SEGMENT@[14; 17)
26 NAME_REF@[14; 17)
27 IDENT@[14; 17) "bar"
28 COLONCOLON@[17; 19) "::"
29 PATH_SEGMENT@[19; 22)
30 NAME_REF@[19; 22)
31 IDENT@[19; 22) "Baz"
32 R_ANGLE@[22; 23) ">"
33 WHITESPACE@[23; 24) " "
34 BLOCK_EXPR@[24; 26)
35 BLOCK@[24; 26)
36 L_CURLY@[24; 25) "{"
37 R_CURLY@[25; 26) "}"
38 WHITESPACE@[26; 28) "\n\n"
39 FN_DEF@[28; 56)
40 FN_KW@[28; 30) "fn"
41 WHITESPACE@[30; 31) " "
42 NAME@[31; 32)
43 IDENT@[31; 32) "b"
44 PARAM_LIST@[32; 53)
45 L_PAREN@[32; 33) "("
46 PARAM@[33; 52)
47 PLACEHOLDER_PAT@[33; 34)
48 UNDERSCORE@[33; 34) "_"
49 COLON@[34; 35) ":"
50 WHITESPACE@[35; 36) " "
51 IMPL_TRAIT_TYPE@[36; 52)
52 IMPL_KW@[36; 40) "impl"
53 WHITESPACE@[40; 41) " "
54 TYPE_BOUND_LIST@[41; 52)
55 TYPE_BOUND@[41; 52)
56 PATH_TYPE@[41; 52)
57 PATH@[41; 52)
58 PATH_SEGMENT@[41; 52)
59 NAME_REF@[41; 46)
60 IDENT@[41; 46) "FnMut"
61 PARAM_LIST@[46; 52)
62 L_PAREN@[46; 47) "("
63 PARAM@[47; 51)
64 PATH_TYPE@[47; 51)
65 PATH@[47; 51)
66 PATH@[47; 48)
67 PATH_SEGMENT@[47; 48)
68 NAME_REF@[47; 48)
69 IDENT@[47; 48) "x"
70 COLONCOLON@[48; 50) "::"
71 PATH_SEGMENT@[50; 51)
72 NAME_REF@[50; 51)
73 IDENT@[50; 51) "Y"
74 R_PAREN@[51; 52) ")"
75 R_PAREN@[52; 53) ")"
76 WHITESPACE@[53; 54) " "
77 BLOCK_EXPR@[54; 56)
78 BLOCK@[54; 56)
79 L_CURLY@[54; 55) "{"
80 R_CURLY@[55; 56) "}"
81 WHITESPACE@[56; 58) "\n\n"
82 FN_DEF@[58; 87)
83 FN_KW@[58; 60) "fn"
84 WHITESPACE@[60; 61) " "
85 NAME@[61; 62)
86 IDENT@[61; 62) "c"
87 PARAM_LIST@[62; 84)
88 L_PAREN@[62; 63) "("
89 PARAM@[63; 83)
90 PLACEHOLDER_PAT@[63; 64)
91 UNDERSCORE@[63; 64) "_"
92 COLON@[64; 65) ":"
93 WHITESPACE@[65; 66) " "
94 IMPL_TRAIT_TYPE@[66; 83)
95 IMPL_KW@[66; 70) "impl"
96 WHITESPACE@[70; 71) " "
97 TYPE_BOUND_LIST@[71; 83)
98 TYPE_BOUND@[71; 83)
99 PATH_TYPE@[71; 83)
100 PATH@[71; 83)
101 PATH_SEGMENT@[71; 83)
102 NAME_REF@[71; 76)
103 IDENT@[71; 76) "FnMut"
104 PARAM_LIST@[76; 83)
105 L_PAREN@[76; 77) "("
106 PARAM@[77; 82)
107 REFERENCE_TYPE@[77; 82)
108 AMP@[77; 78) "&"
109 PATH_TYPE@[78; 82)
110 PATH@[78; 82)
111 PATH@[78; 79)
112 PATH_SEGMENT@[78; 79)
113 NAME_REF@[78; 79)
114 IDENT@[78; 79) "x"
115 COLONCOLON@[79; 81) "::"
116 PATH_SEGMENT@[81; 82)
117 NAME_REF@[81; 82)
118 IDENT@[81; 82) "Y"
119 R_PAREN@[82; 83) ")"
120 R_PAREN@[83; 84) ")"
121 WHITESPACE@[84; 85) " "
122 BLOCK_EXPR@[85; 87)
123 BLOCK@[85; 87)
124 L_CURLY@[85; 86) "{"
125 R_CURLY@[86; 87) "}"
126 WHITESPACE@[87; 88) "\n"
diff --git a/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rs b/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rs
new file mode 100644
index 000000000..cd204f65e
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.rs
@@ -0,0 +1,5 @@
1type X = ();
2
3fn main() {
4 let ():::X = ();
5}
diff --git a/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.txt b/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.txt
new file mode 100644
index 000000000..d656e74b1
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/ok/0055_dot_dot_dot.txt
@@ -0,0 +1,50 @@
1SOURCE_FILE@[0; 49)
2 TYPE_ALIAS_DEF@[0; 12)
3 TYPE_KW@[0; 4) "type"
4 WHITESPACE@[4; 5) " "
5 NAME@[5; 6)
6 IDENT@[5; 6) "X"
7 WHITESPACE@[6; 7) " "
8 EQ@[7; 8) "="
9 WHITESPACE@[8; 9) " "
10 TUPLE_TYPE@[9; 11)
11 L_PAREN@[9; 10) "("
12 R_PAREN@[10; 11) ")"
13 SEMI@[11; 12) ";"
14 WHITESPACE@[12; 14) "\n\n"
15 FN_DEF@[14; 48)
16 FN_KW@[14; 16) "fn"
17 WHITESPACE@[16; 17) " "
18 NAME@[17; 21)
19 IDENT@[17; 21) "main"
20 PARAM_LIST@[21; 23)
21 L_PAREN@[21; 22) "("
22 R_PAREN@[22; 23) ")"
23 WHITESPACE@[23; 24) " "
24 BLOCK_EXPR@[24; 48)
25 BLOCK@[24; 48)
26 L_CURLY@[24; 25) "{"
27 WHITESPACE@[25; 30) "\n "
28 LET_STMT@[30; 46)
29 LET_KW@[30; 33) "let"
30 WHITESPACE@[33; 34) " "
31 TUPLE_PAT@[34; 36)
32 L_PAREN@[34; 35) "("
33 R_PAREN@[35; 36) ")"
34 COLON@[36; 37) ":"
35 PATH_TYPE@[37; 40)
36 PATH@[37; 40)
37 PATH_SEGMENT@[37; 40)
38 COLONCOLON@[37; 39) "::"
39 NAME_REF@[39; 40)
40 IDENT@[39; 40) "X"
41 WHITESPACE@[40; 41) " "
42 EQ@[41; 42) "="
43 WHITESPACE@[42; 43) " "
44 TUPLE_EXPR@[43; 45)
45 L_PAREN@[43; 44) "("
46 R_PAREN@[44; 45) ")"
47 SEMI@[45; 46) ";"
48 WHITESPACE@[46; 47) "\n"
49 R_CURLY@[47; 48) "}"
50 WHITESPACE@[48; 49) "\n"
diff --git a/crates/ra_tools/Cargo.toml b/crates/ra_tools/Cargo.toml
index b94a0b18d..848ca408d 100644
--- a/crates/ra_tools/Cargo.toml
+++ b/crates/ra_tools/Cargo.toml
@@ -8,7 +8,7 @@ publish = false
8[dependencies] 8[dependencies]
9walkdir = "2.1.3" 9walkdir = "2.1.3"
10itertools = "0.8.0" 10itertools = "0.8.0"
11clap = { version = "2.32.0", default-features = false } 11pico-args = "0.2.0"
12quote = "1.0.2" 12quote = "1.0.2"
13proc-macro2 = "1.0.1" 13proc-macro2 = "1.0.1"
14ron = "0.5.1" 14ron = "0.5.1"
diff --git a/crates/ra_tools/src/help.rs b/crates/ra_tools/src/help.rs
new file mode 100644
index 000000000..6dde6c2d2
--- /dev/null
+++ b/crates/ra_tools/src/help.rs
@@ -0,0 +1,45 @@
1pub const GLOBAL_HELP: &str = "tasks
2
3USAGE:
4 ra_tools <SUBCOMMAND>
5
6FLAGS:
7 -h, --help Prints help information
8
9SUBCOMMANDS:
10 format
11 format-hook
12 fuzz-tests
13 gen-syntax
14 gen-tests
15 install-ra
16 lint";
17
18pub const INSTALL_RA_HELP: &str = "ra_tools-install-ra
19
20USAGE:
21 ra_tools.exe install-ra [FLAGS]
22
23FLAGS:
24 --client-code
25 -h, --help Prints help information
26 --jemalloc
27 --server";
28
29pub fn print_no_param_subcommand_help(subcommand: &str) {
30 eprintln!(
31 "ra_tools-{}
32
33USAGE:
34 ra_tools {}
35
36FLAGS:
37 -h, --help Prints help information",
38 subcommand, subcommand
39 );
40}
41
42pub const INSTALL_RA_CONFLICT: &str =
43 "error: The argument `--server` cannot be used with `--client-code`
44
45For more information try --help";
diff --git a/crates/ra_tools/src/main.rs b/crates/ra_tools/src/main.rs
index 33badf290..f96f1875f 100644
--- a/crates/ra_tools/src/main.rs
+++ b/crates/ra_tools/src/main.rs
@@ -1,5 +1,8 @@
1use clap::{App, Arg, SubCommand}; 1mod help;
2
3use core::fmt::Write;
2use core::str; 4use core::str;
5use pico_args::Arguments;
3use ra_tools::{ 6use ra_tools::{
4 gen_tests, generate_boilerplate, install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt, 7 gen_tests, generate_boilerplate, install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt,
5 Cmd, Overwrite, Result, 8 Cmd, Overwrite, Result,
@@ -20,45 +23,95 @@ struct ServerOpt {
20} 23}
21 24
22fn main() -> Result<()> { 25fn main() -> Result<()> {
23 let matches = App::new("tasks") 26 let subcommand = match std::env::args_os().nth(1) {
24 .setting(clap::AppSettings::SubcommandRequiredElseHelp) 27 None => {
25 .subcommand(SubCommand::with_name("gen-syntax")) 28 eprintln!("{}", help::GLOBAL_HELP);
26 .subcommand(SubCommand::with_name("gen-tests")) 29 return Ok(());
27 .subcommand( 30 }
28 SubCommand::with_name("install-ra") 31 Some(s) => s,
29 .arg(Arg::with_name("server").long("--server")) 32 };
30 .arg(Arg::with_name("jemalloc").long("jemalloc")) 33 let mut matches = Arguments::from_vec(std::env::args_os().skip(2).collect());
31 .arg(Arg::with_name("client-code").long("client-code").conflicts_with("server")), 34 let subcommand = &*subcommand.to_string_lossy();
32 ) 35 match subcommand {
33 .alias("install-code") 36 "install-ra" | "install-code" => {
34 .subcommand(SubCommand::with_name("format")) 37 if matches.contains(["-h", "--help"]) {
35 .subcommand(SubCommand::with_name("format-hook")) 38 eprintln!("{}", help::INSTALL_RA_HELP);
36 .subcommand(SubCommand::with_name("fuzz-tests")) 39 return Ok(());
37 .subcommand(SubCommand::with_name("lint")) 40 }
38 .get_matches(); 41 let server = matches.contains("--server");
39 match matches.subcommand() { 42 let client_code = matches.contains("--client-code");
40 ("install-ra", Some(matches)) => { 43 if server && client_code {
44 eprintln!("{}", help::INSTALL_RA_CONFLICT);
45 return Ok(());
46 }
47 let jemalloc = matches.contains("--jemalloc");
48 matches.finish().or_else(handle_extra_flags)?;
41 let opts = InstallOpt { 49 let opts = InstallOpt {
42 client: if matches.is_present("server") { None } else { Some(ClientOpt::VsCode) }, 50 client: if server { None } else { Some(ClientOpt::VsCode) },
43 server: if matches.is_present("client-code") { 51 server: if client_code { None } else { Some(ServerOpt { jemalloc: jemalloc }) },
44 None
45 } else {
46 Some(ServerOpt { jemalloc: matches.is_present("jemalloc") })
47 },
48 }; 52 };
49 install(opts)? 53 install(opts)?
50 } 54 }
51 ("gen-tests", _) => gen_tests(Overwrite)?, 55 "gen-tests" => {
52 ("gen-syntax", _) => generate_boilerplate(Overwrite)?, 56 if matches.contains(["-h", "--help"]) {
53 ("format", _) => run_rustfmt(Overwrite)?, 57 help::print_no_param_subcommand_help(&subcommand);
54 ("format-hook", _) => install_format_hook()?, 58 return Ok(());
55 ("lint", _) => run_clippy()?, 59 }
56 ("fuzz-tests", _) => run_fuzzer()?, 60 gen_tests(Overwrite)?
57 _ => unreachable!(), 61 }
62 "gen-syntax" => {
63 if matches.contains(["-h", "--help"]) {
64 help::print_no_param_subcommand_help(&subcommand);
65 return Ok(());
66 }
67 generate_boilerplate(Overwrite)?
68 }
69 "format" => {
70 if matches.contains(["-h", "--help"]) {
71 help::print_no_param_subcommand_help(&subcommand);
72 return Ok(());
73 }
74 run_rustfmt(Overwrite)?
75 }
76 "format-hook" => {
77 if matches.contains(["-h", "--help"]) {
78 help::print_no_param_subcommand_help(&subcommand);
79 return Ok(());
80 }
81 install_format_hook()?
82 }
83 "lint" => {
84 if matches.contains(["-h", "--help"]) {
85 help::print_no_param_subcommand_help(&subcommand);
86 return Ok(());
87 }
88 run_clippy()?
89 }
90 "fuzz-tests" => {
91 if matches.contains(["-h", "--help"]) {
92 help::print_no_param_subcommand_help(&subcommand);
93 return Ok(());
94 }
95 run_fuzzer()?
96 }
97 _ => eprintln!("{}", help::GLOBAL_HELP),
58 } 98 }
59 Ok(()) 99 Ok(())
60} 100}
61 101
102fn handle_extra_flags(e: pico_args::Error) -> Result<()> {
103 if let pico_args::Error::UnusedArgsLeft(flags) = e {
104 let mut invalid_flags = String::new();
105 for flag in flags {
106 write!(&mut invalid_flags, "{}, ", flag)?;
107 }
108 let (invalid_flags, _) = invalid_flags.split_at(invalid_flags.len() - 2);
109 Err(format!("Invalid flags: {}", invalid_flags).into())
110 } else {
111 Err(e.to_string().into())
112 }
113}
114
62fn install(opts: InstallOpt) -> Result<()> { 115fn install(opts: InstallOpt) -> Result<()> {
63 if cfg!(target_os = "macos") { 116 if cfg!(target_os = "macos") {
64 fix_path_for_mac()? 117 fix_path_for_mac()?
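
The new ra_tools CLI follows a small pico-args pattern: take the subcommand positionally, query boolean flags with `contains`, and call `finish()` at the end so leftover flags are reported. A condensed sketch of that flow, assuming pico-args 0.2 as pinned in Cargo.toml above; the `example-task` subcommand and `--verbose` flag are placeholders, not real ra_tools options.

use pico_args::Arguments;

fn run() -> Result<(), Box<dyn std::error::Error>> {
    // The first positional argument is the subcommand; everything after it
    // is handed to pico-args, just like in main.rs above.
    let subcommand = std::env::args_os().nth(1).unwrap_or_default();
    let mut args = Arguments::from_vec(std::env::args_os().skip(2).collect());

    match &*subcommand.to_string_lossy() {
        "example-task" => {
            // Boolean flags are plain `contains` queries.
            let verbose = args.contains(["-v", "--verbose"]);
            // Reject any flags we did not ask about.
            args.finish()?;
            println!("running example-task, verbose = {}", verbose);
        }
        _ => eprintln!("unknown subcommand"),
    }
    Ok(())
}
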
diff --git a/docs/user/README.md b/docs/user/README.md
index 8205fa404..5101e49b8 100644
--- a/docs/user/README.md
+++ b/docs/user/README.md
@@ -130,7 +130,7 @@ Installation:
130 130
131```json 131```json
132"rust-analyzer": { 132"rust-analyzer": {
133 "command": ["rustup", "run", "stable", "ra_lsp_server"], 133 "command": ["ra_lsp_server"],
134 "languageId": "rust", 134 "languageId": "rust",
135 "scopes": ["source.rust"], 135 "scopes": ["source.rust"],
136 "syntaxes": [ 136 "syntaxes": [
@@ -141,3 +141,5 @@ Installation:
141``` 141```
142 142
143* You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer) 143* You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
144
145* Note that the `ra_lsp_server` binary must be in `$PATH` for this to work. If it is not, you can specify the full path to the binary, which is typically `.cargo/bin/ra_lsp_server`.
diff --git a/editors/code/package.json b/editors/code/package.json
index 7a48d6794..38824acb4 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -13,7 +13,7 @@
13 "Other" 13 "Other"
14 ], 14 ],
15 "engines": { 15 "engines": {
16 "vscode": "^1.36.0" 16 "vscode": "^1.37.0"
17 }, 17 },
18 "scripts": { 18 "scripts": {
19 "vscode:prepublish": "npm run compile", 19 "vscode:prepublish": "npm run compile",
diff --git a/editors/code/src/commands/cargo_watch.ts b/editors/code/src/commands/cargo_watch.ts
index 4c3c10c8b..00b24dbce 100644
--- a/editors/code/src/commands/cargo_watch.ts
+++ b/editors/code/src/commands/cargo_watch.ts
@@ -1,5 +1,4 @@
1import * as child_process from 'child_process'; 1import * as child_process from 'child_process';
2import * as fs from 'fs';
3import * as path from 'path'; 2import * as path from 'path';
4import * as vscode from 'vscode'; 3import * as vscode from 'vscode';
5 4
@@ -15,23 +14,23 @@ import {
15import SuggestedFixCollection from '../utils/diagnostics/SuggestedFixCollection'; 14import SuggestedFixCollection from '../utils/diagnostics/SuggestedFixCollection';
16import { areDiagnosticsEqual } from '../utils/diagnostics/vscode'; 15import { areDiagnosticsEqual } from '../utils/diagnostics/vscode';
17 16
18export function registerCargoWatchProvider( 17export async function registerCargoWatchProvider(
19 subscriptions: vscode.Disposable[] 18 subscriptions: vscode.Disposable[]
20): CargoWatchProvider | undefined { 19): Promise<CargoWatchProvider | undefined> {
21 let cargoExists = false; 20 let cargoExists = false;
22 const cargoTomlFile = path.join(vscode.workspace.rootPath!, 'Cargo.toml'); 21
23 // Check if the working directory is valid cargo root path 22 // Check if the working directory is valid cargo root path
24 try { 23 const cargoTomlPath = path.join(vscode.workspace.rootPath!, 'Cargo.toml');
25 if (fs.existsSync(cargoTomlFile)) { 24 const cargoTomlUri = vscode.Uri.file(cargoTomlPath);
26 cargoExists = true; 25 const cargoTomlFileInfo = await vscode.workspace.fs.stat(cargoTomlUri);
27 } 26
28 } catch (err) { 27 if (cargoTomlFileInfo) {
29 cargoExists = false; 28 cargoExists = true;
30 } 29 }
31 30
32 if (!cargoExists) { 31 if (!cargoExists) {
33 vscode.window.showErrorMessage( 32 vscode.window.showErrorMessage(
34 `Couldn\'t find \'Cargo.toml\' in ${cargoTomlFile}` 33 `Couldn\'t find \'Cargo.toml\' at ${cargoTomlPath}`
35 ); 34 );
36 return; 35 return;
37 } 36 }