diff options
Diffstat (limited to 'crates')
35 files changed, 1194 insertions, 1380 deletions
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml index 29d9ceb59..5ddac1e48 100644 --- a/crates/ra_assists/Cargo.toml +++ b/crates/ra_assists/Cargo.toml | |||
@@ -5,7 +5,7 @@ version = "0.1.0" | |||
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | lazy_static = "1.3.0" | 8 | once_cell = "0.2.0" |
9 | join_to_string = "0.1.3" | 9 | join_to_string = "0.1.3" |
10 | itertools = "0.8.0" | 10 | itertools = "0.8.0" |
11 | arrayvec = "0.4.10" | 11 | arrayvec = "0.4.10" |
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs index 726e5c0a3..aa7aeaabb 100644 --- a/crates/ra_assists/src/ast_editor.rs +++ b/crates/ra_assists/src/ast_editor.rs | |||
@@ -289,12 +289,10 @@ fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> { | |||
289 | } | 289 | } |
290 | 290 | ||
291 | mod tokens { | 291 | mod tokens { |
292 | use lazy_static::lazy_static; | 292 | use once_cell::sync::Lazy; |
293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*}; | 293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*}; |
294 | 294 | ||
295 | lazy_static! { | 295 | static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;")); |
296 | static ref SOURCE_FILE: TreeArc<SourceFile> = SourceFile::parse(",\n; ;"); | ||
297 | } | ||
298 | 296 | ||
299 | pub(crate) fn comma() -> SyntaxToken<'static> { | 297 | pub(crate) fn comma() -> SyntaxToken<'static> { |
300 | SOURCE_FILE | 298 | SOURCE_FILE |
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs index 4516ed660..e6b2b30c6 100644 --- a/crates/ra_cli/src/analysis_stats.rs +++ b/crates/ra_cli/src/analysis_stats.rs | |||
@@ -7,9 +7,9 @@ use ra_syntax::AstNode; | |||
7 | 7 | ||
8 | use crate::Result; | 8 | use crate::Result; |
9 | 9 | ||
10 | pub fn run(verbose: bool) -> Result<()> { | 10 | pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> { |
11 | let db_load_time = Instant::now(); | 11 | let db_load_time = Instant::now(); |
12 | let (db, roots) = BatchDatabase::load_cargo(".")?; | 12 | let (db, roots) = BatchDatabase::load_cargo(path)?; |
13 | println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); | 13 | println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); |
14 | let analysis_time = Instant::now(); | 14 | let analysis_time = Instant::now(); |
15 | let mut num_crates = 0; | 15 | let mut num_crates = 0; |
@@ -57,14 +57,19 @@ pub fn run(verbose: bool) -> Result<()> { | |||
57 | let mut num_exprs_unknown = 0; | 57 | let mut num_exprs_unknown = 0; |
58 | let mut num_exprs_partially_unknown = 0; | 58 | let mut num_exprs_partially_unknown = 0; |
59 | for f in funcs { | 59 | for f in funcs { |
60 | let name = f.name(&db); | ||
60 | if verbose { | 61 | if verbose { |
61 | let (file_id, source) = f.source(&db); | 62 | let (file_id, source) = f.source(&db); |
62 | let original_file = file_id.original_file(&db); | 63 | let original_file = file_id.original_file(&db); |
63 | let path = db.file_relative_path(original_file); | 64 | let path = db.file_relative_path(original_file); |
64 | let syntax_range = source.syntax().range(); | 65 | let syntax_range = source.syntax().range(); |
65 | let name = f.name(&db); | ||
66 | println!("{} ({:?} {})", name, path, syntax_range); | 66 | println!("{} ({:?} {})", name, path, syntax_range); |
67 | } | 67 | } |
68 | if let Some(only_name) = only { | ||
69 | if name.to_string() != only_name { | ||
70 | continue; | ||
71 | } | ||
72 | } | ||
68 | let body = f.body(&db); | 73 | let body = f.body(&db); |
69 | let inference_result = f.infer(&db); | 74 | let inference_result = f.infer(&db); |
70 | for (expr_id, _) in body.exprs() { | 75 | for (expr_id, _) in body.exprs() { |
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs index 45555be6e..038f5f3fe 100644 --- a/crates/ra_cli/src/main.rs +++ b/crates/ra_cli/src/main.rs | |||
@@ -1,11 +1,10 @@ | |||
1 | mod analysis_stats; | 1 | mod analysis_stats; |
2 | 2 | ||
3 | use std::{fs, io::Read, path::Path}; | 3 | use std::io::Read; |
4 | 4 | ||
5 | use clap::{App, Arg, SubCommand}; | 5 | use clap::{App, Arg, SubCommand}; |
6 | use ra_ide_api::file_structure; | 6 | use ra_ide_api::file_structure; |
7 | use ra_syntax::{SourceFile, TreeArc, AstNode}; | 7 | use ra_syntax::{SourceFile, TreeArc, AstNode}; |
8 | use tools::collect_tests; | ||
9 | use flexi_logger::Logger; | 8 | use flexi_logger::Logger; |
10 | use ra_prof::profile; | 9 | use ra_prof::profile; |
11 | 10 | ||
@@ -15,15 +14,13 @@ fn main() -> Result<()> { | |||
15 | Logger::with_env().start()?; | 14 | Logger::with_env().start()?; |
16 | let matches = App::new("ra-cli") | 15 | let matches = App::new("ra-cli") |
17 | .setting(clap::AppSettings::SubcommandRequiredElseHelp) | 16 | .setting(clap::AppSettings::SubcommandRequiredElseHelp) |
18 | .subcommand( | ||
19 | SubCommand::with_name("render-test") | ||
20 | .arg(Arg::with_name("line").long("--line").required(true).takes_value(true)) | ||
21 | .arg(Arg::with_name("file").long("--file").required(true).takes_value(true)), | ||
22 | ) | ||
23 | .subcommand(SubCommand::with_name("parse").arg(Arg::with_name("no-dump").long("--no-dump"))) | 17 | .subcommand(SubCommand::with_name("parse").arg(Arg::with_name("no-dump").long("--no-dump"))) |
24 | .subcommand(SubCommand::with_name("symbols")) | 18 | .subcommand(SubCommand::with_name("symbols")) |
25 | .subcommand( | 19 | .subcommand( |
26 | SubCommand::with_name("analysis-stats").arg(Arg::with_name("verbose").short("v")), | 20 | SubCommand::with_name("analysis-stats") |
21 | .arg(Arg::with_name("verbose").short("v")) | ||
22 | .arg(Arg::with_name("only").short("o").takes_value(true)) | ||
23 | .arg(Arg::with_name("path")), | ||
27 | ) | 24 | ) |
28 | .get_matches(); | 25 | .get_matches(); |
29 | match matches.subcommand() { | 26 | match matches.subcommand() { |
@@ -41,17 +38,11 @@ fn main() -> Result<()> { | |||
41 | println!("{:?}", s); | 38 | println!("{:?}", s); |
42 | } | 39 | } |
43 | } | 40 | } |
44 | ("render-test", Some(matches)) => { | ||
45 | let file = matches.value_of("file").unwrap(); | ||
46 | let file = Path::new(file); | ||
47 | let line: usize = matches.value_of("line").unwrap().parse()?; | ||
48 | let line = line - 1; | ||
49 | let (test, tree) = render_test(file, line)?; | ||
50 | println!("{}\n{}", test, tree); | ||
51 | } | ||
52 | ("analysis-stats", Some(matches)) => { | 41 | ("analysis-stats", Some(matches)) => { |
53 | let verbose = matches.is_present("verbose"); | 42 | let verbose = matches.is_present("verbose"); |
54 | analysis_stats::run(verbose)?; | 43 | let path = matches.value_of("path").unwrap_or(""); |
44 | let only = matches.value_of("only"); | ||
45 | analysis_stats::run(verbose, path, only)?; | ||
55 | } | 46 | } |
56 | _ => unreachable!(), | 47 | _ => unreachable!(), |
57 | } | 48 | } |
@@ -68,18 +59,3 @@ fn read_stdin() -> Result<String> { | |||
68 | ::std::io::stdin().read_to_string(&mut buff)?; | 59 | ::std::io::stdin().read_to_string(&mut buff)?; |
69 | Ok(buff) | 60 | Ok(buff) |
70 | } | 61 | } |
71 | |||
72 | fn render_test(file: &Path, line: usize) -> Result<(String, String)> { | ||
73 | let text = fs::read_to_string(file)?; | ||
74 | let tests = collect_tests(&text); | ||
75 | let test = tests.into_iter().find(|(start_line, t)| { | ||
76 | *start_line <= line && line <= *start_line + t.text.lines().count() | ||
77 | }); | ||
78 | let test = match test { | ||
79 | None => failure::bail!("No test found at line {} at {}", line, file.display()), | ||
80 | Some((_start_line, test)) => test, | ||
81 | }; | ||
82 | let file = SourceFile::parse(&test.text); | ||
83 | let tree = file.syntax().debug_dump(); | ||
84 | Ok((test.text, tree)) | ||
85 | } | ||
diff --git a/crates/ra_hir/src/code_model_api.rs b/crates/ra_hir/src/code_model_api.rs index 55e1793c5..0c4a80bfa 100644 --- a/crates/ra_hir/src/code_model_api.rs +++ b/crates/ra_hir/src/code_model_api.rs | |||
@@ -703,6 +703,10 @@ impl Trait { | |||
703 | TraitRef::for_trait(db, self) | 703 | TraitRef::for_trait(db, self) |
704 | } | 704 | } |
705 | 705 | ||
706 | pub fn is_auto(self, db: &impl DefDatabase) -> bool { | ||
707 | self.trait_data(db).is_auto() | ||
708 | } | ||
709 | |||
706 | pub(crate) fn resolver(&self, db: &impl DefDatabase) -> Resolver { | 710 | pub(crate) fn resolver(&self, db: &impl DefDatabase) -> Resolver { |
707 | let r = self.module(db).resolver(db); | 711 | let r = self.module(db).resolver(db); |
708 | // add generic params, if present | 712 | // add generic params, if present |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index 8aaf0375a..8f98ca3a5 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs | |||
@@ -11,7 +11,7 @@ use crate::{ | |||
11 | DefWithBody, Trait, | 11 | DefWithBody, Trait, |
12 | ids, | 12 | ids, |
13 | nameres::{Namespace, ImportSourceMap, RawItems, CrateDefMap}, | 13 | nameres::{Namespace, ImportSourceMap, RawItems, CrateDefMap}, |
14 | ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, CallableDef, FnSig, TypeCtor}, | 14 | ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, CallableDef, FnSig, TypeCtor, GenericPredicate}, |
15 | adt::{StructData, EnumData}, | 15 | adt::{StructData, EnumData}, |
16 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, | 16 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, |
17 | generics::{GenericParams, GenericDef}, | 17 | generics::{GenericParams, GenericDef}, |
@@ -138,6 +138,9 @@ pub trait HirDatabase: DefDatabase { | |||
138 | #[salsa::invoke(crate::ty::callable_item_sig)] | 138 | #[salsa::invoke(crate::ty::callable_item_sig)] |
139 | fn callable_item_signature(&self, def: CallableDef) -> FnSig; | 139 | fn callable_item_signature(&self, def: CallableDef) -> FnSig; |
140 | 140 | ||
141 | #[salsa::invoke(crate::ty::generic_predicates)] | ||
142 | fn generic_predicates(&self, def: GenericDef) -> Arc<[GenericPredicate]>; | ||
143 | |||
141 | #[salsa::invoke(crate::expr::body_with_source_map_query)] | 144 | #[salsa::invoke(crate::expr::body_with_source_map_query)] |
142 | fn body_with_source_map( | 145 | fn body_with_source_map( |
143 | &self, | 146 | &self, |
@@ -161,6 +164,13 @@ pub trait HirDatabase: DefDatabase { | |||
161 | #[salsa::invoke(crate::ty::traits::solver)] | 164 | #[salsa::invoke(crate::ty::traits::solver)] |
162 | #[salsa::volatile] | 165 | #[salsa::volatile] |
163 | fn solver(&self, krate: Crate) -> Arc<Mutex<crate::ty::traits::Solver>>; | 166 | fn solver(&self, krate: Crate) -> Arc<Mutex<crate::ty::traits::Solver>>; |
167 | |||
168 | #[salsa::invoke(crate::ty::traits::implements)] | ||
169 | fn implements( | ||
170 | &self, | ||
171 | krate: Crate, | ||
172 | goal: crate::ty::Canonical<crate::ty::TraitRef>, | ||
173 | ) -> Option<crate::ty::traits::Solution>; | ||
164 | } | 174 | } |
165 | 175 | ||
166 | #[test] | 176 | #[test] |
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs index 480eaf171..a2b5db1a1 100644 --- a/crates/ra_hir/src/expr.rs +++ b/crates/ra_hir/src/expr.rs | |||
@@ -10,7 +10,7 @@ use ra_syntax::{ | |||
10 | }; | 10 | }; |
11 | 11 | ||
12 | use crate::{ | 12 | use crate::{ |
13 | Path, Name, HirDatabase, Resolver,DefWithBody, Either, HirFileId, | 13 | Path, Name, HirDatabase, Resolver,DefWithBody, Either, HirFileId, MacroCallLoc, |
14 | name::AsName, | 14 | name::AsName, |
15 | type_ref::{Mutability, TypeRef}, | 15 | type_ref::{Mutability, TypeRef}, |
16 | }; | 16 | }; |
@@ -828,7 +828,8 @@ where | |||
828 | .ast_id(e) | 828 | .ast_id(e) |
829 | .with_file_id(self.current_file_id); | 829 | .with_file_id(self.current_file_id); |
830 | 830 | ||
831 | if let Some(call_id) = self.resolver.resolve_macro_call(self.db, path, ast_id) { | 831 | if let Some(def) = self.resolver.resolve_macro_call(path) { |
832 | let call_id = MacroCallLoc { def, ast_id }.id(self.db); | ||
832 | if let Some(tt) = self.db.macro_expand(call_id).ok() { | 833 | if let Some(tt) = self.db.macro_expand(call_id).ok() { |
833 | if let Some(expr) = mbe::token_tree_to_expr(&tt).ok() { | 834 | if let Some(expr) = mbe::token_tree_to_expr(&tt).ok() { |
834 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); | 835 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); |
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs index 2e52c5871..c29b96f50 100644 --- a/crates/ra_hir/src/generics.rs +++ b/crates/ra_hir/src/generics.rs | |||
@@ -8,7 +8,7 @@ use std::sync::Arc; | |||
8 | use ra_syntax::ast::{self, NameOwner, TypeParamsOwner, TypeBoundsOwner}; | 8 | use ra_syntax::ast::{self, NameOwner, TypeParamsOwner, TypeBoundsOwner}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | db::DefDatabase, | 11 | db::{ HirDatabase, DefDatabase}, |
12 | Name, AsName, Function, Struct, Enum, Trait, TypeAlias, ImplBlock, Container, path::Path, type_ref::TypeRef, AdtDef | 12 | Name, AsName, Function, Struct, Enum, Trait, TypeAlias, ImplBlock, Container, path::Path, type_ref::TypeRef, AdtDef |
13 | }; | 13 | }; |
14 | 14 | ||
@@ -32,8 +32,8 @@ pub struct GenericParams { | |||
32 | /// where clauses like `where T: Foo + Bar` are turned into multiple of these. | 32 | /// where clauses like `where T: Foo + Bar` are turned into multiple of these. |
33 | #[derive(Clone, PartialEq, Eq, Debug)] | 33 | #[derive(Clone, PartialEq, Eq, Debug)] |
34 | pub struct WherePredicate { | 34 | pub struct WherePredicate { |
35 | type_ref: TypeRef, | 35 | pub(crate) type_ref: TypeRef, |
36 | trait_ref: Path, | 36 | pub(crate) trait_ref: Path, |
37 | } | 37 | } |
38 | 38 | ||
39 | // FIXME: consts can have type parameters from their parents (i.e. associated consts of traits) | 39 | // FIXME: consts can have type parameters from their parents (i.e. associated consts of traits) |
@@ -90,8 +90,17 @@ impl GenericParams { | |||
90 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { | 90 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { |
91 | for (idx, type_param) in params.type_params().enumerate() { | 91 | for (idx, type_param) in params.type_params().enumerate() { |
92 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); | 92 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); |
93 | let param = GenericParam { idx: idx as u32 + start, name }; | 93 | let param = GenericParam { idx: idx as u32 + start, name: name.clone() }; |
94 | self.params.push(param); | 94 | self.params.push(param); |
95 | |||
96 | let type_ref = TypeRef::Path(name.into()); | ||
97 | for bound in type_param | ||
98 | .type_bound_list() | ||
99 | .iter() | ||
100 | .flat_map(|type_bound_list| type_bound_list.bounds()) | ||
101 | { | ||
102 | self.add_where_predicate_from_bound(bound, type_ref.clone()); | ||
103 | } | ||
95 | } | 104 | } |
96 | } | 105 | } |
97 | 106 | ||
@@ -101,26 +110,28 @@ impl GenericParams { | |||
101 | Some(type_ref) => type_ref, | 110 | Some(type_ref) => type_ref, |
102 | None => continue, | 111 | None => continue, |
103 | }; | 112 | }; |
113 | let type_ref = TypeRef::from_ast(type_ref); | ||
104 | for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) { | 114 | for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) { |
105 | let path = bound | 115 | self.add_where_predicate_from_bound(bound, type_ref.clone()); |
106 | .type_ref() | ||
107 | .and_then(|tr| match tr.kind() { | ||
108 | ast::TypeRefKind::PathType(path) => path.path(), | ||
109 | _ => None, | ||
110 | }) | ||
111 | .and_then(Path::from_ast); | ||
112 | let path = match path { | ||
113 | Some(p) => p, | ||
114 | None => continue, | ||
115 | }; | ||
116 | self.where_predicates.push(WherePredicate { | ||
117 | type_ref: TypeRef::from_ast(type_ref), | ||
118 | trait_ref: path, | ||
119 | }); | ||
120 | } | 116 | } |
121 | } | 117 | } |
122 | } | 118 | } |
123 | 119 | ||
120 | fn add_where_predicate_from_bound(&mut self, bound: &ast::TypeBound, type_ref: TypeRef) { | ||
121 | let path = bound | ||
122 | .type_ref() | ||
123 | .and_then(|tr| match tr.kind() { | ||
124 | ast::TypeRefKind::PathType(path) => path.path(), | ||
125 | _ => None, | ||
126 | }) | ||
127 | .and_then(Path::from_ast); | ||
128 | let path = match path { | ||
129 | Some(p) => p, | ||
130 | None => return, | ||
131 | }; | ||
132 | self.where_predicates.push(WherePredicate { type_ref, trait_ref: path }); | ||
133 | } | ||
134 | |||
124 | pub(crate) fn find_by_name(&self, name: &Name) -> Option<&GenericParam> { | 135 | pub(crate) fn find_by_name(&self, name: &Name) -> Option<&GenericParam> { |
125 | self.params.iter().find(|p| &p.name == name) | 136 | self.params.iter().find(|p| &p.name == name) |
126 | } | 137 | } |
@@ -148,6 +159,19 @@ impl GenericParams { | |||
148 | } | 159 | } |
149 | } | 160 | } |
150 | 161 | ||
162 | impl GenericDef { | ||
163 | pub(crate) fn resolver(&self, db: &impl HirDatabase) -> crate::Resolver { | ||
164 | match self { | ||
165 | GenericDef::Function(inner) => inner.resolver(db), | ||
166 | GenericDef::Struct(inner) => inner.resolver(db), | ||
167 | GenericDef::Enum(inner) => inner.resolver(db), | ||
168 | GenericDef::Trait(inner) => inner.resolver(db), | ||
169 | GenericDef::TypeAlias(inner) => inner.resolver(db), | ||
170 | GenericDef::ImplBlock(inner) => inner.resolver(db), | ||
171 | } | ||
172 | } | ||
173 | } | ||
174 | |||
151 | impl From<Container> for GenericDef { | 175 | impl From<Container> for GenericDef { |
152 | fn from(c: Container) -> Self { | 176 | fn from(c: Container) -> Self { |
153 | match c { | 177 | match c { |
diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs index a8a466e43..b7dd775f1 100644 --- a/crates/ra_hir/src/impl_block.rs +++ b/crates/ra_hir/src/impl_block.rs | |||
@@ -93,6 +93,10 @@ impl ImplBlock { | |||
93 | db.impls_in_module(self.module).impls[self.impl_id].items().to_vec() | 93 | db.impls_in_module(self.module).impls[self.impl_id].items().to_vec() |
94 | } | 94 | } |
95 | 95 | ||
96 | pub fn is_negative(&self, db: &impl DefDatabase) -> bool { | ||
97 | db.impls_in_module(self.module).impls[self.impl_id].negative | ||
98 | } | ||
99 | |||
96 | pub(crate) fn resolver(&self, db: &impl DefDatabase) -> Resolver { | 100 | pub(crate) fn resolver(&self, db: &impl DefDatabase) -> Resolver { |
97 | let r = self.module().resolver(db); | 101 | let r = self.module().resolver(db); |
98 | // add generic params, if present | 102 | // add generic params, if present |
@@ -108,6 +112,7 @@ pub struct ImplData { | |||
108 | target_trait: Option<TypeRef>, | 112 | target_trait: Option<TypeRef>, |
109 | target_type: TypeRef, | 113 | target_type: TypeRef, |
110 | items: Vec<ImplItem>, | 114 | items: Vec<ImplItem>, |
115 | negative: bool, | ||
111 | } | 116 | } |
112 | 117 | ||
113 | impl ImplData { | 118 | impl ImplData { |
@@ -120,6 +125,7 @@ impl ImplData { | |||
120 | let target_trait = node.target_trait().map(TypeRef::from_ast); | 125 | let target_trait = node.target_trait().map(TypeRef::from_ast); |
121 | let target_type = TypeRef::from_ast_opt(node.target_type()); | 126 | let target_type = TypeRef::from_ast_opt(node.target_type()); |
122 | let ctx = LocationCtx::new(db, module, file_id); | 127 | let ctx = LocationCtx::new(db, module, file_id); |
128 | let negative = node.is_negative(); | ||
123 | let items = if let Some(item_list) = node.item_list() { | 129 | let items = if let Some(item_list) = node.item_list() { |
124 | item_list | 130 | item_list |
125 | .impl_items() | 131 | .impl_items() |
@@ -132,7 +138,7 @@ impl ImplData { | |||
132 | } else { | 138 | } else { |
133 | Vec::new() | 139 | Vec::new() |
134 | }; | 140 | }; |
135 | ImplData { target_trait, target_type, items } | 141 | ImplData { target_trait, target_type, items, negative } |
136 | } | 142 | } |
137 | 143 | ||
138 | pub fn target_trait(&self) -> Option<&TypeRef> { | 144 | pub fn target_trait(&self) -> Option<&TypeRef> { |
diff --git a/crates/ra_hir/src/marks.rs b/crates/ra_hir/src/marks.rs index 5b6400042..2d831f0d8 100644 --- a/crates/ra_hir/src/marks.rs +++ b/crates/ra_hir/src/marks.rs | |||
@@ -9,4 +9,5 @@ test_utils::marks!( | |||
9 | glob_across_crates | 9 | glob_across_crates |
10 | std_prelude | 10 | std_prelude |
11 | match_ergonomics_ref | 11 | match_ergonomics_ref |
12 | trait_resolution_on_fn_type | ||
12 | ); | 13 | ); |
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs index a450d7b84..0290b3474 100644 --- a/crates/ra_hir/src/nameres.rs +++ b/crates/ra_hir/src/nameres.rs | |||
@@ -272,8 +272,8 @@ impl CrateDefMap { | |||
272 | (res.resolved_def, res.segment_index) | 272 | (res.resolved_def, res.segment_index) |
273 | } | 273 | } |
274 | 274 | ||
275 | pub(crate) fn find_macro(&self, name: &Name) -> Option<&MacroDefId> { | 275 | pub(crate) fn find_macro(&self, name: &Name) -> Option<MacroDefId> { |
276 | self.public_macros.get(name).or(self.local_macros.get(name)) | 276 | self.public_macros.get(name).or(self.local_macros.get(name)).map(|it| *it) |
277 | } | 277 | } |
278 | 278 | ||
279 | // Returns Yes if we are sure that additions to `ItemMap` wouldn't change | 279 | // Returns Yes if we are sure that additions to `ItemMap` wouldn't change |
diff --git a/crates/ra_hir/src/resolve.rs b/crates/ra_hir/src/resolve.rs index 707556ef8..3874e28bf 100644 --- a/crates/ra_hir/src/resolve.rs +++ b/crates/ra_hir/src/resolve.rs | |||
@@ -1,16 +1,12 @@ | |||
1 | //! Name resolution. | 1 | //! Name resolution. |
2 | use std::sync::Arc; | 2 | use std::sync::Arc; |
3 | 3 | ||
4 | use ra_syntax::ast; | 4 | use rustc_hash::{FxHashMap, FxHashSet}; |
5 | |||
6 | use rustc_hash::FxHashMap; | ||
7 | 5 | ||
8 | use crate::{ | 6 | use crate::{ |
9 | ModuleDef, Trait, | 7 | ModuleDef, Trait, |
10 | code_model_api::Crate, | 8 | code_model_api::Crate, |
11 | MacroCallId, | 9 | MacroDefId, |
12 | MacroCallLoc, | ||
13 | AstId, | ||
14 | db::HirDatabase, | 10 | db::HirDatabase, |
15 | name::{Name, KnownName}, | 11 | name::{Name, KnownName}, |
16 | nameres::{PerNs, CrateDefMap, CrateModuleId}, | 12 | nameres::{PerNs, CrateDefMap, CrateModuleId}, |
@@ -134,16 +130,9 @@ impl Resolver { | |||
134 | resolution | 130 | resolution |
135 | } | 131 | } |
136 | 132 | ||
137 | pub fn resolve_macro_call( | 133 | pub(crate) fn resolve_macro_call(&self, path: Option<Path>) -> Option<MacroDefId> { |
138 | &self, | ||
139 | db: &impl HirDatabase, | ||
140 | path: Option<Path>, | ||
141 | ast_id: AstId<ast::MacroCall>, | ||
142 | ) -> Option<MacroCallId> { | ||
143 | let name = path.and_then(|path| path.expand_macro_expr()).unwrap_or_else(Name::missing); | 134 | let name = path.and_then(|path| path.expand_macro_expr()).unwrap_or_else(Name::missing); |
144 | let def_id = self.module().and_then(|(module, _)| module.find_macro(&name))?; | 135 | self.module()?.0.find_macro(&name) |
145 | let call_loc = MacroCallLoc { def: *def_id, ast_id }.id(db); | ||
146 | Some(call_loc) | ||
147 | } | 136 | } |
148 | 137 | ||
149 | /// Returns the resolved path segments | 138 | /// Returns the resolved path segments |
@@ -193,19 +182,18 @@ impl Resolver { | |||
193 | names | 182 | names |
194 | } | 183 | } |
195 | 184 | ||
196 | pub(crate) fn traits_in_scope<'a>(&'a self) -> impl Iterator<Item = Trait> + 'a { | 185 | pub(crate) fn traits_in_scope(&self, db: &impl HirDatabase) -> FxHashSet<Trait> { |
197 | // FIXME prelude | 186 | let mut traits = FxHashSet::default(); |
198 | self.scopes | 187 | for scope in &self.scopes { |
199 | .iter() | 188 | if let Scope::ModuleScope(m) = scope { |
200 | .rev() | 189 | if let Some(prelude) = m.crate_def_map.prelude() { |
201 | .flat_map(|scope| { | 190 | let prelude_def_map = db.crate_def_map(prelude.krate); |
202 | match scope { | 191 | traits.extend(prelude_def_map[prelude.module_id].scope.traits()); |
203 | Scope::ModuleScope(m) => Some(m.crate_def_map[m.module_id].scope.traits()), | ||
204 | _ => None, | ||
205 | } | 192 | } |
206 | .into_iter() | 193 | traits.extend(m.crate_def_map[m.module_id].scope.traits()); |
207 | }) | 194 | } |
208 | .flatten() | 195 | } |
196 | traits | ||
209 | } | 197 | } |
210 | 198 | ||
211 | fn module(&self) -> Option<(&CrateDefMap, CrateModuleId)> { | 199 | fn module(&self) -> Option<(&CrateDefMap, CrateModuleId)> { |
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index 31bf13425..179faebfb 100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs | |||
@@ -20,7 +20,7 @@ use crate::{ | |||
20 | HirDatabase, Function, Struct, Enum, Const, Static, Either, DefWithBody, PerNs, Name, | 20 | HirDatabase, Function, Struct, Enum, Const, Static, Either, DefWithBody, PerNs, Name, |
21 | AsName, Module, HirFileId, Crate, Trait, Resolver, Ty,Path, | 21 | AsName, Module, HirFileId, Crate, Trait, Resolver, Ty,Path, |
22 | expr::{BodySourceMap, scope::{ScopeId, ExprScopes}}, | 22 | expr::{BodySourceMap, scope::{ScopeId, ExprScopes}}, |
23 | ids::{LocationCtx,MacroCallId}, | 23 | ids::{LocationCtx, MacroDefId}, |
24 | docs::{docs_from_ast,Documentation}, | 24 | docs::{docs_from_ast,Documentation}, |
25 | expr, AstId, | 25 | expr, AstId, |
26 | }; | 26 | }; |
@@ -191,13 +191,12 @@ pub enum PathResolution { | |||
191 | 191 | ||
192 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 192 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
193 | pub struct MacroByExampleDef { | 193 | pub struct MacroByExampleDef { |
194 | pub(crate) id: MacroCallId, | 194 | pub(crate) id: MacroDefId, |
195 | } | 195 | } |
196 | 196 | ||
197 | impl MacroByExampleDef { | 197 | impl MacroByExampleDef { |
198 | pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::MacroCall>) { | 198 | pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, TreeArc<ast::MacroCall>) { |
199 | let loc = self.id.loc(db); | 199 | (self.id.0.file_id(), self.id.0.to_node(db)) |
200 | (self.id.into(), loc.def.0.to_node(db)) | ||
201 | } | 200 | } |
202 | } | 201 | } |
203 | 202 | ||
@@ -284,21 +283,9 @@ impl SourceAnalyzer { | |||
284 | self.infer.as_ref()?.field_resolution(expr_id) | 283 | self.infer.as_ref()?.field_resolution(expr_id) |
285 | } | 284 | } |
286 | 285 | ||
287 | pub fn resolve_macro_call( | 286 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroByExampleDef> { |
288 | &self, | 287 | let id = self.resolver.resolve_macro_call(macro_call.path().and_then(Path::from_ast))?; |
289 | db: &impl HirDatabase, | 288 | Some(MacroByExampleDef { id }) |
290 | file_id: FileId, | ||
291 | macro_call: &ast::MacroCall, | ||
292 | ) -> Option<MacroByExampleDef> { | ||
293 | let hir_id = file_id.into(); | ||
294 | let ast_id = db.ast_id_map(hir_id).ast_id(macro_call).with_file_id(hir_id); | ||
295 | let call_id = self.resolver.resolve_macro_call( | ||
296 | db, | ||
297 | macro_call.path().and_then(Path::from_ast), | ||
298 | ast_id, | ||
299 | ); | ||
300 | |||
301 | call_id.map(|id| MacroByExampleDef { id }) | ||
302 | } | 289 | } |
303 | 290 | ||
304 | pub fn resolve_hir_path( | 291 | pub fn resolve_hir_path( |
diff --git a/crates/ra_hir/src/traits.rs b/crates/ra_hir/src/traits.rs index 15f0977b7..dfe883fa4 100644 --- a/crates/ra_hir/src/traits.rs +++ b/crates/ra_hir/src/traits.rs | |||
@@ -11,6 +11,7 @@ use crate::{Function, Const, TypeAlias, Name, DefDatabase, Trait, ids::LocationC | |||
11 | pub struct TraitData { | 11 | pub struct TraitData { |
12 | name: Option<Name>, | 12 | name: Option<Name>, |
13 | items: Vec<TraitItem>, | 13 | items: Vec<TraitItem>, |
14 | auto: bool, | ||
14 | } | 15 | } |
15 | 16 | ||
16 | impl TraitData { | 17 | impl TraitData { |
@@ -19,6 +20,7 @@ impl TraitData { | |||
19 | let name = node.name().map(|n| n.as_name()); | 20 | let name = node.name().map(|n| n.as_name()); |
20 | let module = tr.module(db); | 21 | let module = tr.module(db); |
21 | let ctx = LocationCtx::new(db, module, file_id); | 22 | let ctx = LocationCtx::new(db, module, file_id); |
23 | let auto = node.is_auto(); | ||
22 | let items = if let Some(item_list) = node.item_list() { | 24 | let items = if let Some(item_list) = node.item_list() { |
23 | item_list | 25 | item_list |
24 | .impl_items() | 26 | .impl_items() |
@@ -31,7 +33,7 @@ impl TraitData { | |||
31 | } else { | 33 | } else { |
32 | Vec::new() | 34 | Vec::new() |
33 | }; | 35 | }; |
34 | Arc::new(TraitData { name, items }) | 36 | Arc::new(TraitData { name, items, auto }) |
35 | } | 37 | } |
36 | 38 | ||
37 | pub(crate) fn name(&self) -> &Option<Name> { | 39 | pub(crate) fn name(&self) -> &Option<Name> { |
@@ -41,6 +43,10 @@ impl TraitData { | |||
41 | pub(crate) fn items(&self) -> &[TraitItem] { | 43 | pub(crate) fn items(&self) -> &[TraitItem] { |
42 | &self.items | 44 | &self.items |
43 | } | 45 | } |
46 | |||
47 | pub(crate) fn is_auto(&self) -> bool { | ||
48 | self.auto | ||
49 | } | ||
44 | } | 50 | } |
45 | 51 | ||
46 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 52 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index f4eee835f..cfe07156b 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs | |||
@@ -19,7 +19,7 @@ use std::{fmt, mem}; | |||
19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams}; | 19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams}; |
20 | use display::{HirDisplay, HirFormatter}; | 20 | use display::{HirDisplay, HirFormatter}; |
21 | 21 | ||
22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig}; | 22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates}; |
23 | pub(crate) use infer::{infer, InferenceResult, InferTy}; | 23 | pub(crate) use infer::{infer, InferenceResult, InferTy}; |
24 | pub use lower::CallableDef; | 24 | pub use lower::CallableDef; |
25 | 25 | ||
@@ -234,13 +234,42 @@ impl TraitRef { | |||
234 | } | 234 | } |
235 | } | 235 | } |
236 | 236 | ||
237 | /// Like `generics::WherePredicate`, but with resolved types: A condition on the | ||
238 | /// parameters of a generic item. | ||
239 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
240 | pub enum GenericPredicate { | ||
241 | /// The given trait needs to be implemented for its type parameters. | ||
242 | Implemented(TraitRef), | ||
243 | /// We couldn't resolve the trait reference. (If some type parameters can't | ||
244 | /// be resolved, they will just be Unknown). | ||
245 | Error, | ||
246 | } | ||
247 | |||
248 | impl GenericPredicate { | ||
249 | pub fn is_error(&self) -> bool { | ||
250 | match self { | ||
251 | GenericPredicate::Error => true, | ||
252 | _ => false, | ||
253 | } | ||
254 | } | ||
255 | |||
256 | pub fn subst(self, substs: &Substs) -> GenericPredicate { | ||
257 | match self { | ||
258 | GenericPredicate::Implemented(trait_ref) => { | ||
259 | GenericPredicate::Implemented(trait_ref.subst(substs)) | ||
260 | } | ||
261 | GenericPredicate::Error => self, | ||
262 | } | ||
263 | } | ||
264 | } | ||
265 | |||
237 | /// Basically a claim (currently not validated / checked) that the contained | 266 | /// Basically a claim (currently not validated / checked) that the contained |
238 | /// type / trait ref contains no inference variables; any inference variables it | 267 | /// type / trait ref contains no inference variables; any inference variables it |
239 | /// contained have been replaced by bound variables, and `num_vars` tells us how | 268 | /// contained have been replaced by bound variables, and `num_vars` tells us how |
240 | /// many there are. This is used to erase irrelevant differences between types | 269 | /// many there are. This is used to erase irrelevant differences between types |
241 | /// before using them in queries. | 270 | /// before using them in queries. |
242 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 271 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
243 | pub(crate) struct Canonical<T> { | 272 | pub struct Canonical<T> { |
244 | pub value: T, | 273 | pub value: T, |
245 | pub num_vars: usize, | 274 | pub num_vars: usize, |
246 | } | 275 | } |
@@ -534,3 +563,20 @@ impl HirDisplay for Ty { | |||
534 | Ok(()) | 563 | Ok(()) |
535 | } | 564 | } |
536 | } | 565 | } |
566 | |||
567 | impl HirDisplay for TraitRef { | ||
568 | fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { | ||
569 | write!( | ||
570 | f, | ||
571 | "{}: {}", | ||
572 | self.substs[0].display(f.db), | ||
573 | self.trait_.name(f.db).unwrap_or_else(Name::missing) | ||
574 | )?; | ||
575 | if self.substs.len() > 1 { | ||
576 | write!(f, "<")?; | ||
577 | f.write_joined(&self.substs[1..], ", ")?; | ||
578 | write!(f, ">")?; | ||
579 | } | ||
580 | Ok(()) | ||
581 | } | ||
582 | } | ||
diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs index edce1afe7..1e7d97f51 100644 --- a/crates/ra_hir/src/ty/infer.rs +++ b/crates/ra_hir/src/ty/infer.rs | |||
@@ -328,8 +328,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
328 | Obligation::Trait(tr) => { | 328 | Obligation::Trait(tr) => { |
329 | let canonicalized = self.canonicalizer().canonicalize_trait_ref(tr.clone()); | 329 | let canonicalized = self.canonicalizer().canonicalize_trait_ref(tr.clone()); |
330 | ( | 330 | ( |
331 | super::traits::implements( | 331 | self.db.implements( |
332 | self.db, | ||
333 | self.resolver.krate().unwrap(), | 332 | self.resolver.krate().unwrap(), |
334 | canonicalized.value.clone(), | 333 | canonicalized.value.clone(), |
335 | ), | 334 | ), |
diff --git a/crates/ra_hir/src/ty/lower.rs b/crates/ra_hir/src/ty/lower.rs index 8bab7e54b..09d26ce5a 100644 --- a/crates/ra_hir/src/ty/lower.rs +++ b/crates/ra_hir/src/ty/lower.rs | |||
@@ -5,6 +5,7 @@ | |||
5 | //! - Building the type for an item: This happens through the `type_for_def` query. | 5 | //! - Building the type for an item: This happens through the `type_for_def` query. |
6 | //! | 6 | //! |
7 | //! This usually involves resolving names, collecting generic arguments etc. | 7 | //! This usually involves resolving names, collecting generic arguments etc. |
8 | use std::sync::Arc; | ||
8 | use std::iter; | 9 | use std::iter; |
9 | 10 | ||
10 | use crate::{ | 11 | use crate::{ |
@@ -18,9 +19,9 @@ use crate::{ | |||
18 | resolve::{Resolver, Resolution}, | 19 | resolve::{Resolver, Resolution}, |
19 | path::{PathSegment, GenericArg}, | 20 | path::{PathSegment, GenericArg}, |
20 | generics::{GenericParams, HasGenericParams}, | 21 | generics::{GenericParams, HasGenericParams}, |
21 | adt::VariantDef, Trait | 22 | adt::VariantDef, Trait, generics::{ WherePredicate, GenericDef} |
22 | }; | 23 | }; |
23 | use super::{Ty, primitive, FnSig, Substs, TypeCtor, TraitRef}; | 24 | use super::{Ty, primitive, FnSig, Substs, TypeCtor, TraitRef, GenericPredicate}; |
24 | 25 | ||
25 | impl Ty { | 26 | impl Ty { |
26 | pub(crate) fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self { | 27 | pub(crate) fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self { |
@@ -208,16 +209,12 @@ pub(super) fn substs_from_path_segment( | |||
208 | } | 209 | } |
209 | 210 | ||
210 | impl TraitRef { | 211 | impl TraitRef { |
211 | pub(crate) fn from_hir( | 212 | pub(crate) fn from_path( |
212 | db: &impl HirDatabase, | 213 | db: &impl HirDatabase, |
213 | resolver: &Resolver, | 214 | resolver: &Resolver, |
214 | type_ref: &TypeRef, | 215 | path: &Path, |
215 | explicit_self_ty: Option<Ty>, | 216 | explicit_self_ty: Option<Ty>, |
216 | ) -> Option<Self> { | 217 | ) -> Option<Self> { |
217 | let path = match type_ref { | ||
218 | TypeRef::Path(path) => path, | ||
219 | _ => return None, | ||
220 | }; | ||
221 | let resolved = match resolver.resolve_path(db, &path).take_types()? { | 218 | let resolved = match resolver.resolve_path(db, &path).take_types()? { |
222 | Resolution::Def(ModuleDef::Trait(tr)) => tr, | 219 | Resolution::Def(ModuleDef::Trait(tr)) => tr, |
223 | _ => return None, | 220 | _ => return None, |
@@ -232,6 +229,19 @@ impl TraitRef { | |||
232 | Some(TraitRef { trait_: resolved, substs }) | 229 | Some(TraitRef { trait_: resolved, substs }) |
233 | } | 230 | } |
234 | 231 | ||
232 | pub(crate) fn from_hir( | ||
233 | db: &impl HirDatabase, | ||
234 | resolver: &Resolver, | ||
235 | type_ref: &TypeRef, | ||
236 | explicit_self_ty: Option<Ty>, | ||
237 | ) -> Option<Self> { | ||
238 | let path = match type_ref { | ||
239 | TypeRef::Path(path) => path, | ||
240 | _ => return None, | ||
241 | }; | ||
242 | TraitRef::from_path(db, resolver, path, explicit_self_ty) | ||
243 | } | ||
244 | |||
235 | fn substs_from_path( | 245 | fn substs_from_path( |
236 | db: &impl HirDatabase, | 246 | db: &impl HirDatabase, |
237 | resolver: &Resolver, | 247 | resolver: &Resolver, |
@@ -246,6 +256,15 @@ impl TraitRef { | |||
246 | let substs = Substs::identity(&trait_.generic_params(db)); | 256 | let substs = Substs::identity(&trait_.generic_params(db)); |
247 | TraitRef { trait_, substs } | 257 | TraitRef { trait_, substs } |
248 | } | 258 | } |
259 | |||
260 | pub(crate) fn for_where_predicate( | ||
261 | db: &impl HirDatabase, | ||
262 | resolver: &Resolver, | ||
263 | pred: &WherePredicate, | ||
264 | ) -> Option<TraitRef> { | ||
265 | let self_ty = Ty::from_hir(db, resolver, &pred.type_ref); | ||
266 | TraitRef::from_path(db, resolver, &pred.trait_ref, Some(self_ty)) | ||
267 | } | ||
249 | } | 268 | } |
250 | 269 | ||
251 | /// Build the declared type of an item. This depends on the namespace; e.g. for | 270 | /// Build the declared type of an item. This depends on the namespace; e.g. for |
@@ -294,6 +313,24 @@ pub(crate) fn type_for_field(db: &impl HirDatabase, field: StructField) -> Ty { | |||
294 | Ty::from_hir(db, &resolver, type_ref) | 313 | Ty::from_hir(db, &resolver, type_ref) |
295 | } | 314 | } |
296 | 315 | ||
316 | /// Resolve the where clause(s) of an item with generics. | ||
317 | pub(crate) fn generic_predicates( | ||
318 | db: &impl HirDatabase, | ||
319 | def: GenericDef, | ||
320 | ) -> Arc<[GenericPredicate]> { | ||
321 | let resolver = def.resolver(db); | ||
322 | let generic_params = def.generic_params(db); | ||
323 | let predicates = generic_params | ||
324 | .where_predicates | ||
325 | .iter() | ||
326 | .map(|pred| { | ||
327 | TraitRef::for_where_predicate(db, &resolver, pred) | ||
328 | .map_or(GenericPredicate::Error, GenericPredicate::Implemented) | ||
329 | }) | ||
330 | .collect::<Vec<_>>(); | ||
331 | predicates.into() | ||
332 | } | ||
333 | |||
297 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { | 334 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { |
298 | let signature = def.signature(db); | 335 | let signature = def.signature(db); |
299 | let resolver = def.resolver(db); | 336 | let resolver = def.resolver(db); |
diff --git a/crates/ra_hir/src/ty/method_resolution.rs b/crates/ra_hir/src/ty/method_resolution.rs index 607e9ba79..34817a5ec 100644 --- a/crates/ra_hir/src/ty/method_resolution.rs +++ b/crates/ra_hir/src/ty/method_resolution.rs | |||
@@ -75,11 +75,13 @@ impl CrateImplBlocks { | |||
75 | 75 | ||
76 | let target_ty = impl_block.target_ty(db); | 76 | let target_ty = impl_block.target_ty(db); |
77 | 77 | ||
78 | if let Some(tr) = impl_block.target_trait_ref(db) { | 78 | if impl_block.target_trait(db).is_some() { |
79 | self.impls_by_trait | 79 | if let Some(tr) = impl_block.target_trait_ref(db) { |
80 | .entry(tr.trait_) | 80 | self.impls_by_trait |
81 | .or_insert_with(Vec::new) | 81 | .entry(tr.trait_) |
82 | .push((module.module_id, impl_id)); | 82 | .or_insert_with(Vec::new) |
83 | .push((module.module_id, impl_id)); | ||
84 | } | ||
83 | } else { | 85 | } else { |
84 | if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) { | 86 | if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) { |
85 | self.impls | 87 | self.impls |
@@ -183,7 +185,7 @@ fn iterate_trait_method_candidates<T>( | |||
183 | mut callback: impl FnMut(&Ty, Function) -> Option<T>, | 185 | mut callback: impl FnMut(&Ty, Function) -> Option<T>, |
184 | ) -> Option<T> { | 186 | ) -> Option<T> { |
185 | let krate = resolver.krate()?; | 187 | let krate = resolver.krate()?; |
186 | 'traits: for t in resolver.traits_in_scope() { | 188 | 'traits: for t in resolver.traits_in_scope(db) { |
187 | let data = t.trait_data(db); | 189 | let data = t.trait_data(db); |
188 | // we'll be lazy about checking whether the type implements the | 190 | // we'll be lazy about checking whether the type implements the |
189 | // trait, but if we find out it doesn't, we'll skip the rest of the | 191 | // trait, but if we find out it doesn't, we'll skip the rest of the |
@@ -196,8 +198,7 @@ fn iterate_trait_method_candidates<T>( | |||
196 | if name.map_or(true, |name| sig.name() == name) && sig.has_self_param() { | 198 | if name.map_or(true, |name| sig.name() == name) && sig.has_self_param() { |
197 | if !known_implemented { | 199 | if !known_implemented { |
198 | let trait_ref = canonical_trait_ref(db, t, ty.clone()); | 200 | let trait_ref = canonical_trait_ref(db, t, ty.clone()); |
199 | // FIXME cache this implements check (without solution) in a query? | 201 | if db.implements(krate, trait_ref).is_none() { |
200 | if super::traits::implements(db, krate, trait_ref).is_none() { | ||
201 | continue 'traits; | 202 | continue 'traits; |
202 | } | 203 | } |
203 | } | 204 | } |
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index a38fe35c7..978cc2587 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs | |||
@@ -2502,6 +2502,50 @@ fn test() { (&S).foo()<|>; } | |||
2502 | } | 2502 | } |
2503 | 2503 | ||
2504 | #[test] | 2504 | #[test] |
2505 | fn method_resolution_trait_from_prelude() { | ||
2506 | let (mut db, pos) = MockDatabase::with_position( | ||
2507 | r#" | ||
2508 | //- /main.rs | ||
2509 | struct S; | ||
2510 | impl Clone for S {} | ||
2511 | |||
2512 | fn test() { | ||
2513 | S.clone()<|>; | ||
2514 | } | ||
2515 | |||
2516 | //- /lib.rs | ||
2517 | #[prelude_import] use foo::*; | ||
2518 | |||
2519 | mod foo { | ||
2520 | trait Clone { | ||
2521 | fn clone(&self) -> Self; | ||
2522 | } | ||
2523 | } | ||
2524 | "#, | ||
2525 | ); | ||
2526 | db.set_crate_graph_from_fixture(crate_graph! { | ||
2527 | "main": ("/main.rs", ["other_crate"]), | ||
2528 | "other_crate": ("/lib.rs", []), | ||
2529 | }); | ||
2530 | assert_eq!("S", type_at_pos(&db, pos)); | ||
2531 | } | ||
2532 | |||
2533 | #[test] | ||
2534 | fn method_resolution_where_clause_for_unknown_trait() { | ||
2535 | // The blanket impl shouldn't apply because we can't even resolve UnknownTrait | ||
2536 | let t = type_at( | ||
2537 | r#" | ||
2538 | //- /main.rs | ||
2539 | trait Trait { fn foo(self) -> u128; } | ||
2540 | struct S; | ||
2541 | impl<T> Trait for T where T: UnknownTrait {} | ||
2542 | fn test() { (&S).foo()<|>; } | ||
2543 | "#, | ||
2544 | ); | ||
2545 | assert_eq!(t, "{unknown}"); | ||
2546 | } | ||
2547 | |||
2548 | #[test] | ||
2505 | fn method_resolution_where_clause_not_met() { | 2549 | fn method_resolution_where_clause_not_met() { |
2506 | // The blanket impl shouldn't apply because we can't prove S: Clone | 2550 | // The blanket impl shouldn't apply because we can't prove S: Clone |
2507 | let t = type_at( | 2551 | let t = type_at( |
@@ -2510,12 +2554,122 @@ fn method_resolution_where_clause_not_met() { | |||
2510 | trait Clone {} | 2554 | trait Clone {} |
2511 | trait Trait { fn foo(self) -> u128; } | 2555 | trait Trait { fn foo(self) -> u128; } |
2512 | struct S; | 2556 | struct S; |
2513 | impl S { fn foo(self) -> i8 { 0 } } | 2557 | impl<T> Trait for T where T: Clone {} |
2514 | impl<T> Trait for T where T: Clone { fn foo(self) -> u128 { 0 } } | ||
2515 | fn test() { (&S).foo()<|>; } | 2558 | fn test() { (&S).foo()<|>; } |
2516 | "#, | 2559 | "#, |
2517 | ); | 2560 | ); |
2518 | assert_eq!(t, "i8"); | 2561 | // This is also to make sure that we don't resolve to the foo method just |
2562 | // because that's the only method named foo we can find, which would make | ||
2563 | // the below tests not work | ||
2564 | assert_eq!(t, "{unknown}"); | ||
2565 | } | ||
2566 | |||
2567 | #[test] | ||
2568 | fn method_resolution_where_clause_inline_not_met() { | ||
2569 | // The blanket impl shouldn't apply because we can't prove S: Clone | ||
2570 | let t = type_at( | ||
2571 | r#" | ||
2572 | //- /main.rs | ||
2573 | trait Clone {} | ||
2574 | trait Trait { fn foo(self) -> u128; } | ||
2575 | struct S; | ||
2576 | impl<T: Clone> Trait for T {} | ||
2577 | fn test() { (&S).foo()<|>; } | ||
2578 | "#, | ||
2579 | ); | ||
2580 | assert_eq!(t, "{unknown}"); | ||
2581 | } | ||
2582 | |||
2583 | #[test] | ||
2584 | fn method_resolution_where_clause_1() { | ||
2585 | let t = type_at( | ||
2586 | r#" | ||
2587 | //- /main.rs | ||
2588 | trait Clone {} | ||
2589 | trait Trait { fn foo(self) -> u128; } | ||
2590 | struct S; | ||
2591 | impl Clone for S {}; | ||
2592 | impl<T> Trait for T where T: Clone {} | ||
2593 | fn test() { S.foo()<|>; } | ||
2594 | "#, | ||
2595 | ); | ||
2596 | assert_eq!(t, "u128"); | ||
2597 | } | ||
2598 | |||
2599 | #[test] | ||
2600 | fn method_resolution_where_clause_2() { | ||
2601 | let t = type_at( | ||
2602 | r#" | ||
2603 | //- /main.rs | ||
2604 | trait Into<T> { fn into(self) -> T; } | ||
2605 | trait From<T> { fn from(other: T) -> Self; } | ||
2606 | struct S1; | ||
2607 | struct S2; | ||
2608 | impl From<S2> for S1 {}; | ||
2609 | impl<T, U> Into<U> for T where U: From<T> {} | ||
2610 | fn test() { S2.into()<|>; } | ||
2611 | "#, | ||
2612 | ); | ||
2613 | assert_eq!(t, "S1"); | ||
2614 | } | ||
2615 | |||
2616 | #[test] | ||
2617 | fn method_resolution_where_clause_inline() { | ||
2618 | let t = type_at( | ||
2619 | r#" | ||
2620 | //- /main.rs | ||
2621 | trait Into<T> { fn into(self) -> T; } | ||
2622 | trait From<T> { fn from(other: T) -> Self; } | ||
2623 | struct S1; | ||
2624 | struct S2; | ||
2625 | impl From<S2> for S1 {}; | ||
2626 | impl<T, U: From<T>> Into<U> for T {} | ||
2627 | fn test() { S2.into()<|>; } | ||
2628 | "#, | ||
2629 | ); | ||
2630 | assert_eq!(t, "S1"); | ||
2631 | } | ||
2632 | |||
2633 | #[test] | ||
2634 | fn method_resolution_encountering_fn_type() { | ||
2635 | covers!(trait_resolution_on_fn_type); | ||
2636 | type_at( | ||
2637 | r#" | ||
2638 | //- /main.rs | ||
2639 | fn foo() {} | ||
2640 | trait FnOnce { fn call(self); } | ||
2641 | fn test() { foo.call()<|>; } | ||
2642 | "#, | ||
2643 | ); | ||
2644 | } | ||
2645 | |||
2646 | #[test] | ||
2647 | fn method_resolution_slow() { | ||
2648 | // this can get quite slow if we set the solver size limit too high | ||
2649 | let t = type_at( | ||
2650 | r#" | ||
2651 | //- /main.rs | ||
2652 | trait SendX {} | ||
2653 | |||
2654 | struct S1; impl SendX for S1; | ||
2655 | struct S2; impl SendX for S2; | ||
2656 | struct U1; | ||
2657 | |||
2658 | trait Trait { fn method(self); } | ||
2659 | |||
2660 | struct X1<A, B> {} | ||
2661 | impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {} | ||
2662 | |||
2663 | struct S<B, C> {} | ||
2664 | |||
2665 | trait FnX {} | ||
2666 | |||
2667 | impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {} | ||
2668 | |||
2669 | fn test() { (S {}).method()<|>; } | ||
2670 | "#, | ||
2671 | ); | ||
2672 | assert_eq!(t, "{unknown}"); | ||
2519 | } | 2673 | } |
2520 | 2674 | ||
2521 | fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { | 2675 | fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { |
diff --git a/crates/ra_hir/src/ty/traits.rs b/crates/ra_hir/src/ty/traits.rs index a1ed0c028..4260f7ef7 100644 --- a/crates/ra_hir/src/ty/traits.rs +++ b/crates/ra_hir/src/ty/traits.rs | |||
@@ -1,6 +1,7 @@ | |||
1 | //! Trait solving using Chalk. | 1 | //! Trait solving using Chalk. |
2 | use std::sync::{Arc, Mutex}; | 2 | use std::sync::{Arc, Mutex}; |
3 | 3 | ||
4 | use rustc_hash::FxHashSet; | ||
4 | use log::debug; | 5 | use log::debug; |
5 | use chalk_ir::cast::Cast; | 6 | use chalk_ir::cast::Cast; |
6 | 7 | ||
@@ -13,6 +14,11 @@ mod chalk; | |||
13 | 14 | ||
14 | pub(crate) type Solver = chalk_solve::Solver; | 15 | pub(crate) type Solver = chalk_solve::Solver; |
15 | 16 | ||
17 | /// This controls the maximum size of types Chalk considers. If we set this too | ||
18 | /// high, we can run into slow edge cases; if we set it too low, Chalk won't | ||
19 | /// find some solutions. | ||
20 | const CHALK_SOLVER_MAX_SIZE: usize = 2; | ||
21 | |||
16 | #[derive(Debug, Copy, Clone)] | 22 | #[derive(Debug, Copy, Clone)] |
17 | struct ChalkContext<'a, DB> { | 23 | struct ChalkContext<'a, DB> { |
18 | db: &'a DB, | 24 | db: &'a DB, |
@@ -21,7 +27,8 @@ struct ChalkContext<'a, DB> { | |||
21 | 27 | ||
22 | pub(crate) fn solver(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver>> { | 28 | pub(crate) fn solver(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver>> { |
23 | // krate parameter is just so we cache a unique solver per crate | 29 | // krate parameter is just so we cache a unique solver per crate |
24 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: 10 }; | 30 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; |
31 | debug!("Creating new solver for crate {:?}", _krate); | ||
25 | Arc::new(Mutex::new(solver_choice.into_solver())) | 32 | Arc::new(Mutex::new(solver_choice.into_solver())) |
26 | } | 33 | } |
27 | 34 | ||
@@ -31,7 +38,7 @@ pub(crate) fn impls_for_trait( | |||
31 | krate: Crate, | 38 | krate: Crate, |
32 | trait_: Trait, | 39 | trait_: Trait, |
33 | ) -> Arc<[ImplBlock]> { | 40 | ) -> Arc<[ImplBlock]> { |
34 | let mut impls = Vec::new(); | 41 | let mut impls = FxHashSet::default(); |
35 | // We call the query recursively here. On the one hand, this means we can | 42 | // We call the query recursively here. On the one hand, this means we can |
36 | // reuse results from queries for different crates; on the other hand, this | 43 | // reuse results from queries for different crates; on the other hand, this |
37 | // will only ever get called for a few crates near the root of the tree (the | 44 | // will only ever get called for a few crates near the root of the tree (the |
@@ -42,7 +49,7 @@ pub(crate) fn impls_for_trait( | |||
42 | } | 49 | } |
43 | let crate_impl_blocks = db.impls_in_crate(krate); | 50 | let crate_impl_blocks = db.impls_in_crate(krate); |
44 | impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(&trait_)); | 51 | impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(&trait_)); |
45 | impls.into() | 52 | impls.into_iter().collect::<Vec<_>>().into() |
46 | } | 53 | } |
47 | 54 | ||
48 | fn solve( | 55 | fn solve( |
@@ -52,6 +59,7 @@ fn solve( | |||
52 | ) -> Option<chalk_solve::Solution> { | 59 | ) -> Option<chalk_solve::Solution> { |
53 | let context = ChalkContext { db, krate }; | 60 | let context = ChalkContext { db, krate }; |
54 | let solver = db.solver(krate); | 61 | let solver = db.solver(krate); |
62 | debug!("solve goal: {:?}", goal); | ||
55 | let solution = solver.lock().unwrap().solve(&context, goal); | 63 | let solution = solver.lock().unwrap().solve(&context, goal); |
56 | debug!("solve({:?}) => {:?}", goal, solution); | 64 | debug!("solve({:?}) => {:?}", goal, solution); |
57 | solution | 65 | solution |
@@ -125,11 +133,11 @@ fn solution_from_chalk(db: &impl HirDatabase, solution: chalk_solve::Solution) - | |||
125 | } | 133 | } |
126 | 134 | ||
127 | #[derive(Clone, Debug, PartialEq, Eq)] | 135 | #[derive(Clone, Debug, PartialEq, Eq)] |
128 | pub(crate) struct SolutionVariables(pub Canonical<Vec<Ty>>); | 136 | pub struct SolutionVariables(pub Canonical<Vec<Ty>>); |
129 | 137 | ||
130 | #[derive(Clone, Debug, PartialEq, Eq)] | 138 | #[derive(Clone, Debug, PartialEq, Eq)] |
131 | /// A (possible) solution for a proposed goal. | 139 | /// A (possible) solution for a proposed goal. |
132 | pub(crate) enum Solution { | 140 | pub enum Solution { |
133 | /// The goal indeed holds, and there is a unique value for all existential | 141 | /// The goal indeed holds, and there is a unique value for all existential |
134 | /// variables. | 142 | /// variables. |
135 | Unique(SolutionVariables), | 143 | Unique(SolutionVariables), |
@@ -144,7 +152,7 @@ pub(crate) enum Solution { | |||
144 | #[derive(Clone, Debug, PartialEq, Eq)] | 152 | #[derive(Clone, Debug, PartialEq, Eq)] |
145 | /// When a goal holds ambiguously (e.g., because there are multiple possible | 153 | /// When a goal holds ambiguously (e.g., because there are multiple possible |
146 | /// solutions), we issue a set of *guidance* back to type inference. | 154 | /// solutions), we issue a set of *guidance* back to type inference. |
147 | pub(crate) enum Guidance { | 155 | pub enum Guidance { |
148 | /// The existential variables *must* have the given values if the goal is | 156 | /// The existential variables *must* have the given values if the goal is |
149 | /// ever to hold, but that alone isn't enough to guarantee the goal will | 157 | /// ever to hold, but that alone isn't enough to guarantee the goal will |
150 | /// actually hold. | 158 | /// actually hold. |
diff --git a/crates/ra_hir/src/ty/traits/chalk.rs b/crates/ra_hir/src/ty/traits/chalk.rs index 8b77d21b4..78440b258 100644 --- a/crates/ra_hir/src/ty/traits/chalk.rs +++ b/crates/ra_hir/src/ty/traits/chalk.rs | |||
@@ -6,15 +6,22 @@ use log::debug; | |||
6 | use chalk_ir::{TypeId, ImplId, TypeKindId, ProjectionTy, Parameter, Identifier, cast::Cast, PlaceholderIndex, UniverseIndex, TypeName}; | 6 | use chalk_ir::{TypeId, ImplId, TypeKindId, ProjectionTy, Parameter, Identifier, cast::Cast, PlaceholderIndex, UniverseIndex, TypeName}; |
7 | use chalk_rust_ir::{AssociatedTyDatum, TraitDatum, StructDatum, ImplDatum}; | 7 | use chalk_rust_ir::{AssociatedTyDatum, TraitDatum, StructDatum, ImplDatum}; |
8 | 8 | ||
9 | use test_utils::tested_by; | ||
9 | use ra_db::salsa::{InternId, InternKey}; | 10 | use ra_db::salsa::{InternId, InternKey}; |
10 | 11 | ||
11 | use crate::{ | 12 | use crate::{ |
12 | Trait, HasGenericParams, ImplBlock, | 13 | Trait, HasGenericParams, ImplBlock, |
13 | db::HirDatabase, | 14 | db::HirDatabase, |
14 | ty::{TraitRef, Ty, ApplicationTy, TypeCtor, Substs}, | 15 | ty::{TraitRef, Ty, ApplicationTy, TypeCtor, Substs, GenericPredicate, CallableDef}, |
16 | ty::display::HirDisplay, | ||
17 | generics::GenericDef, | ||
15 | }; | 18 | }; |
16 | use super::ChalkContext; | 19 | use super::ChalkContext; |
17 | 20 | ||
21 | /// This represents a trait whose name we could not resolve. | ||
22 | const UNKNOWN_TRAIT: chalk_ir::TraitId = | ||
23 | chalk_ir::TraitId(chalk_ir::RawId { index: u32::max_value() }); | ||
24 | |||
18 | pub(super) trait ToChalk { | 25 | pub(super) trait ToChalk { |
19 | type Chalk; | 26 | type Chalk; |
20 | fn to_chalk(self, db: &impl HirDatabase) -> Self::Chalk; | 27 | fn to_chalk(self, db: &impl HirDatabase) -> Self::Chalk; |
@@ -45,7 +52,10 @@ impl ToChalk for Ty { | |||
45 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), | 52 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), |
46 | // FIXME this is clearly incorrect, but probably not too incorrect | 53 | // FIXME this is clearly incorrect, but probably not too incorrect |
47 | // and I'm not sure what to actually do with Ty::Unknown | 54 | // and I'm not sure what to actually do with Ty::Unknown |
48 | Ty::Unknown => PlaceholderIndex { ui: UniverseIndex::ROOT, idx: 0 }.to_ty(), | 55 | // maybe an alternative would be `for<T> T`? (meaningless in rust, but expressible in chalk's Ty) |
56 | Ty::Unknown => { | ||
57 | PlaceholderIndex { ui: UniverseIndex::ROOT, idx: usize::max_value() }.to_ty() | ||
58 | } | ||
49 | } | 59 | } |
50 | } | 60 | } |
51 | fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty) -> Self { | 61 | fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty) -> Self { |
@@ -146,6 +156,33 @@ impl ToChalk for ImplBlock { | |||
146 | } | 156 | } |
147 | } | 157 | } |
148 | 158 | ||
159 | impl ToChalk for GenericPredicate { | ||
160 | type Chalk = chalk_ir::QuantifiedWhereClause; | ||
161 | |||
162 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::QuantifiedWhereClause { | ||
163 | match self { | ||
164 | GenericPredicate::Implemented(trait_ref) => { | ||
165 | make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) | ||
166 | } | ||
167 | GenericPredicate::Error => { | ||
168 | let impossible_trait_ref = chalk_ir::TraitRef { | ||
169 | trait_id: UNKNOWN_TRAIT, | ||
170 | parameters: vec![Ty::Unknown.to_chalk(db).cast()], | ||
171 | }; | ||
172 | make_binders(chalk_ir::WhereClause::Implemented(impossible_trait_ref), 0) | ||
173 | } | ||
174 | } | ||
175 | } | ||
176 | |||
177 | fn from_chalk( | ||
178 | _db: &impl HirDatabase, | ||
179 | _where_clause: chalk_ir::QuantifiedWhereClause, | ||
180 | ) -> GenericPredicate { | ||
181 | // This should never need to be called | ||
182 | unimplemented!() | ||
183 | } | ||
184 | } | ||
185 | |||
149 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { | 186 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { |
150 | chalk_ir::Binders { | 187 | chalk_ir::Binders { |
151 | value, | 188 | value, |
@@ -153,6 +190,40 @@ fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { | |||
153 | } | 190 | } |
154 | } | 191 | } |
155 | 192 | ||
193 | fn blacklisted_trait(db: &impl HirDatabase, trait_: Trait) -> bool { | ||
194 | let name = trait_.name(db).unwrap_or_else(crate::Name::missing).to_string(); | ||
195 | match &*name { | ||
196 | "Send" | "Sync" | "Sized" | "Fn" | "FnMut" | "FnOnce" => true, | ||
197 | _ => false, | ||
198 | } | ||
199 | } | ||
200 | |||
201 | fn convert_where_clauses( | ||
202 | db: &impl HirDatabase, | ||
203 | def: GenericDef, | ||
204 | substs: &Substs, | ||
205 | ) -> Vec<chalk_ir::QuantifiedWhereClause> { | ||
206 | let generic_predicates = db.generic_predicates(def); | ||
207 | let mut result = Vec::with_capacity(generic_predicates.len()); | ||
208 | for pred in generic_predicates.iter() { | ||
209 | if pred.is_error() { | ||
210 | // HACK: Return just the single predicate (which is always false | ||
211 | // anyway), otherwise Chalk can easily get into slow situations | ||
212 | return vec![pred.clone().subst(substs).to_chalk(db)]; | ||
213 | } | ||
214 | match pred { | ||
215 | GenericPredicate::Implemented(trait_ref) => { | ||
216 | if blacklisted_trait(db, trait_ref.trait_) { | ||
217 | continue; | ||
218 | } | ||
219 | } | ||
220 | _ => {} | ||
221 | } | ||
222 | result.push(pred.clone().subst(substs).to_chalk(db)); | ||
223 | } | ||
224 | result | ||
225 | } | ||
226 | |||
156 | impl<'a, DB> chalk_solve::RustIrDatabase for ChalkContext<'a, DB> | 227 | impl<'a, DB> chalk_solve::RustIrDatabase for ChalkContext<'a, DB> |
157 | where | 228 | where |
158 | DB: HirDatabase, | 229 | DB: HirDatabase, |
@@ -162,18 +233,36 @@ where | |||
162 | } | 233 | } |
163 | fn trait_datum(&self, trait_id: chalk_ir::TraitId) -> Arc<TraitDatum> { | 234 | fn trait_datum(&self, trait_id: chalk_ir::TraitId) -> Arc<TraitDatum> { |
164 | debug!("trait_datum {:?}", trait_id); | 235 | debug!("trait_datum {:?}", trait_id); |
236 | if trait_id == UNKNOWN_TRAIT { | ||
237 | let trait_datum_bound = chalk_rust_ir::TraitDatumBound { | ||
238 | trait_ref: chalk_ir::TraitRef { | ||
239 | trait_id: UNKNOWN_TRAIT, | ||
240 | parameters: vec![chalk_ir::Ty::BoundVar(0).cast()], | ||
241 | }, | ||
242 | associated_ty_ids: Vec::new(), | ||
243 | where_clauses: Vec::new(), | ||
244 | flags: chalk_rust_ir::TraitFlags { | ||
245 | auto: false, | ||
246 | marker: false, | ||
247 | upstream: true, | ||
248 | fundamental: false, | ||
249 | }, | ||
250 | }; | ||
251 | return Arc::new(TraitDatum { binders: make_binders(trait_datum_bound, 1) }); | ||
252 | } | ||
165 | let trait_: Trait = from_chalk(self.db, trait_id); | 253 | let trait_: Trait = from_chalk(self.db, trait_id); |
254 | debug!("trait {:?} = {:?}", trait_id, trait_.name(self.db)); | ||
166 | let generic_params = trait_.generic_params(self.db); | 255 | let generic_params = trait_.generic_params(self.db); |
167 | let bound_vars = Substs::bound_vars(&generic_params); | 256 | let bound_vars = Substs::bound_vars(&generic_params); |
168 | let trait_ref = trait_.trait_ref(self.db).subst(&bound_vars).to_chalk(self.db); | 257 | let trait_ref = trait_.trait_ref(self.db).subst(&bound_vars).to_chalk(self.db); |
169 | let flags = chalk_rust_ir::TraitFlags { | 258 | let flags = chalk_rust_ir::TraitFlags { |
259 | auto: trait_.is_auto(self.db), | ||
260 | upstream: trait_.module(self.db).krate(self.db) != Some(self.krate), | ||
170 | // FIXME set these flags correctly | 261 | // FIXME set these flags correctly |
171 | auto: false, | ||
172 | marker: false, | 262 | marker: false, |
173 | upstream: trait_.module(self.db).krate(self.db) != Some(self.krate), | ||
174 | fundamental: false, | 263 | fundamental: false, |
175 | }; | 264 | }; |
176 | let where_clauses = Vec::new(); // FIXME add where clauses | 265 | let where_clauses = convert_where_clauses(self.db, trait_.into(), &bound_vars); |
177 | let associated_ty_ids = Vec::new(); // FIXME add associated tys | 266 | let associated_ty_ids = Vec::new(); // FIXME add associated tys |
178 | let trait_datum_bound = | 267 | let trait_datum_bound = |
179 | chalk_rust_ir::TraitDatumBound { trait_ref, where_clauses, flags, associated_ty_ids }; | 268 | chalk_rust_ir::TraitDatumBound { trait_ref, where_clauses, flags, associated_ty_ids }; |
@@ -183,23 +272,51 @@ where | |||
183 | fn struct_datum(&self, struct_id: chalk_ir::StructId) -> Arc<StructDatum> { | 272 | fn struct_datum(&self, struct_id: chalk_ir::StructId) -> Arc<StructDatum> { |
184 | debug!("struct_datum {:?}", struct_id); | 273 | debug!("struct_datum {:?}", struct_id); |
185 | let type_ctor = from_chalk(self.db, struct_id); | 274 | let type_ctor = from_chalk(self.db, struct_id); |
275 | debug!("struct {:?} = {:?}", struct_id, type_ctor); | ||
186 | // FIXME might be nicer if we can create a fake GenericParams for the TypeCtor | 276 | // FIXME might be nicer if we can create a fake GenericParams for the TypeCtor |
187 | // FIXME extract this to a method on Ty | 277 | // FIXME extract this to a method on Ty |
188 | let (num_params, upstream) = match type_ctor { | 278 | let (num_params, where_clauses, upstream) = match type_ctor { |
189 | TypeCtor::Bool | 279 | TypeCtor::Bool |
190 | | TypeCtor::Char | 280 | | TypeCtor::Char |
191 | | TypeCtor::Int(_) | 281 | | TypeCtor::Int(_) |
192 | | TypeCtor::Float(_) | 282 | | TypeCtor::Float(_) |
193 | | TypeCtor::Never | 283 | | TypeCtor::Never |
194 | | TypeCtor::Str => (0, true), | 284 | | TypeCtor::Str => (0, vec![], true), |
195 | TypeCtor::Slice | TypeCtor::Array | TypeCtor::RawPtr(_) | TypeCtor::Ref(_) => (1, true), | 285 | TypeCtor::Slice | TypeCtor::Array | TypeCtor::RawPtr(_) | TypeCtor::Ref(_) => { |
196 | TypeCtor::FnPtr { num_args } => (num_args as usize + 1, true), | 286 | (1, vec![], true) |
197 | TypeCtor::Tuple { cardinality } => (cardinality as usize, true), | 287 | } |
198 | TypeCtor::FnDef(_) => unimplemented!(), | 288 | TypeCtor::FnPtr { num_args } => (num_args as usize + 1, vec![], true), |
289 | TypeCtor::Tuple { cardinality } => (cardinality as usize, vec![], true), | ||
290 | TypeCtor::FnDef(callable) => { | ||
291 | tested_by!(trait_resolution_on_fn_type); | ||
292 | let krate = match callable { | ||
293 | CallableDef::Function(f) => f.module(self.db).krate(self.db), | ||
294 | CallableDef::Struct(s) => s.module(self.db).krate(self.db), | ||
295 | CallableDef::EnumVariant(v) => { | ||
296 | v.parent_enum(self.db).module(self.db).krate(self.db) | ||
297 | } | ||
298 | }; | ||
299 | let generic_def: GenericDef = match callable { | ||
300 | CallableDef::Function(f) => f.into(), | ||
301 | CallableDef::Struct(s) => s.into(), | ||
302 | CallableDef::EnumVariant(v) => v.parent_enum(self.db).into(), | ||
303 | }; | ||
304 | let generic_params = generic_def.generic_params(self.db); | ||
305 | let bound_vars = Substs::bound_vars(&generic_params); | ||
306 | let where_clauses = convert_where_clauses(self.db, generic_def, &bound_vars); | ||
307 | ( | ||
308 | generic_params.count_params_including_parent(), | ||
309 | where_clauses, | ||
310 | krate != Some(self.krate), | ||
311 | ) | ||
312 | } | ||
199 | TypeCtor::Adt(adt) => { | 313 | TypeCtor::Adt(adt) => { |
200 | let generic_params = adt.generic_params(self.db); | 314 | let generic_params = adt.generic_params(self.db); |
315 | let bound_vars = Substs::bound_vars(&generic_params); | ||
316 | let where_clauses = convert_where_clauses(self.db, adt.into(), &bound_vars); | ||
201 | ( | 317 | ( |
202 | generic_params.count_params_including_parent(), | 318 | generic_params.count_params_including_parent(), |
319 | where_clauses, | ||
203 | adt.krate(self.db) != Some(self.krate), | 320 | adt.krate(self.db) != Some(self.krate), |
204 | ) | 321 | ) |
205 | } | 322 | } |
@@ -209,7 +326,6 @@ where | |||
209 | // FIXME set fundamental flag correctly | 326 | // FIXME set fundamental flag correctly |
210 | fundamental: false, | 327 | fundamental: false, |
211 | }; | 328 | }; |
212 | let where_clauses = Vec::new(); // FIXME add where clauses | ||
213 | let self_ty = chalk_ir::ApplicationTy { | 329 | let self_ty = chalk_ir::ApplicationTy { |
214 | name: TypeName::TypeKindId(type_ctor.to_chalk(self.db).into()), | 330 | name: TypeName::TypeKindId(type_ctor.to_chalk(self.db).into()), |
215 | parameters: (0..num_params).map(|i| chalk_ir::Ty::BoundVar(i).cast()).collect(), | 331 | parameters: (0..num_params).map(|i| chalk_ir::Ty::BoundVar(i).cast()).collect(), |
@@ -237,10 +353,23 @@ where | |||
237 | } else { | 353 | } else { |
238 | chalk_rust_ir::ImplType::External | 354 | chalk_rust_ir::ImplType::External |
239 | }; | 355 | }; |
356 | let where_clauses = convert_where_clauses(self.db, impl_block.into(), &bound_vars); | ||
357 | let negative = impl_block.is_negative(self.db); | ||
358 | debug!( | ||
359 | "impl {:?}: {}{} where {:?}", | ||
360 | impl_id, | ||
361 | if negative { "!" } else { "" }, | ||
362 | trait_ref.display(self.db), | ||
363 | where_clauses | ||
364 | ); | ||
365 | let trait_ref = trait_ref.to_chalk(self.db); | ||
240 | let impl_datum_bound = chalk_rust_ir::ImplDatumBound { | 366 | let impl_datum_bound = chalk_rust_ir::ImplDatumBound { |
241 | // FIXME handle negative impls (impl !Sync for Foo) | 367 | trait_ref: if negative { |
242 | trait_ref: chalk_rust_ir::PolarizedTraitRef::Positive(trait_ref.to_chalk(self.db)), | 368 | chalk_rust_ir::PolarizedTraitRef::Negative(trait_ref) |
243 | where_clauses: Vec::new(), // FIXME add where clauses | 369 | } else { |
370 | chalk_rust_ir::PolarizedTraitRef::Positive(trait_ref) | ||
371 | }, | ||
372 | where_clauses, | ||
244 | associated_ty_values: Vec::new(), // FIXME add associated type values | 373 | associated_ty_values: Vec::new(), // FIXME add associated type values |
245 | impl_type, | 374 | impl_type, |
246 | }; | 375 | }; |
@@ -249,16 +378,22 @@ where | |||
249 | } | 378 | } |
250 | fn impls_for_trait(&self, trait_id: chalk_ir::TraitId) -> Vec<ImplId> { | 379 | fn impls_for_trait(&self, trait_id: chalk_ir::TraitId) -> Vec<ImplId> { |
251 | debug!("impls_for_trait {:?}", trait_id); | 380 | debug!("impls_for_trait {:?}", trait_id); |
252 | let trait_ = from_chalk(self.db, trait_id); | 381 | if trait_id == UNKNOWN_TRAIT { |
253 | self.db | 382 | return Vec::new(); |
383 | } | ||
384 | let trait_: Trait = from_chalk(self.db, trait_id); | ||
385 | let blacklisted = blacklisted_trait(self.db, trait_); | ||
386 | if blacklisted { | ||
387 | return Vec::new(); | ||
388 | } | ||
389 | let result: Vec<_> = self | ||
390 | .db | ||
254 | .impls_for_trait(self.krate, trait_) | 391 | .impls_for_trait(self.krate, trait_) |
255 | .iter() | 392 | .iter() |
256 | // FIXME temporary hack -- as long as we're not lowering where clauses | ||
257 | // correctly, ignore impls with them completely so as to not treat | ||
258 | // impl<T> Trait for T where T: ... as a blanket impl on all types | ||
259 | .filter(|impl_block| impl_block.generic_params(self.db).where_predicates.is_empty()) | ||
260 | .map(|impl_block| impl_block.to_chalk(self.db)) | 393 | .map(|impl_block| impl_block.to_chalk(self.db)) |
261 | .collect() | 394 | .collect(); |
395 | debug!("impls_for_trait returned {} impls", result.len()); | ||
396 | result | ||
262 | } | 397 | } |
263 | fn impl_provided_for( | 398 | fn impl_provided_for( |
264 | &self, | 399 | &self, |
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs index 533c229fe..adae29e9c 100644 --- a/crates/ra_ide_api/src/goto_definition.rs +++ b/crates/ra_ide_api/src/goto_definition.rs | |||
@@ -69,7 +69,7 @@ pub(crate) fn reference_definition( | |||
69 | .and_then(ast::MacroCall::cast) | 69 | .and_then(ast::MacroCall::cast) |
70 | { | 70 | { |
71 | tested_by!(goto_definition_works_for_macros); | 71 | tested_by!(goto_definition_works_for_macros); |
72 | if let Some(macro_call) = analyzer.resolve_macro_call(db, file_id, macro_call) { | 72 | if let Some(macro_call) = analyzer.resolve_macro_call(macro_call) { |
73 | return Exact(NavigationTarget::from_macro_def(db, macro_call)); | 73 | return Exact(NavigationTarget::from_macro_def(db, macro_call)); |
74 | } | 74 | } |
75 | } | 75 | } |
diff --git a/crates/ra_prof/Cargo.toml b/crates/ra_prof/Cargo.toml index 19ce21783..5f23e865c 100644 --- a/crates/ra_prof/Cargo.toml +++ b/crates/ra_prof/Cargo.toml | |||
@@ -6,4 +6,4 @@ authors = ["rust-analyzer developers"] | |||
6 | publish = false | 6 | publish = false |
7 | 7 | ||
8 | [dependencies] | 8 | [dependencies] |
9 | lazy_static = "1.3.0" \ No newline at end of file | 9 | once_cell = "0.2.0" |
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs index 9ecb8e744..e56446c9f 100644 --- a/crates/ra_prof/src/lib.rs +++ b/crates/ra_prof/src/lib.rs | |||
@@ -1,13 +1,14 @@ | |||
1 | use std::cell::RefCell; | 1 | use std::{ |
2 | use std::time::{Duration, Instant}; | 2 | cell::RefCell, |
3 | use std::mem; | 3 | time::{Duration, Instant}, |
4 | use std::io::{stderr, Write}; | 4 | mem, |
5 | use std::iter::repeat; | 5 | io::{stderr, Write}, |
6 | use std::collections::{HashSet}; | 6 | iter::repeat, |
7 | use std::default::Default; | 7 | collections::HashSet, |
8 | use std::iter::FromIterator; | 8 | sync::{RwLock, atomic::{AtomicBool, Ordering}}, |
9 | use std::sync::{RwLock, atomic::{AtomicBool, Ordering}}; | 9 | }; |
10 | use lazy_static::lazy_static; | 10 | |
11 | use once_cell::sync::Lazy; | ||
11 | 12 | ||
12 | /// Set profiling filter. It specifies descriptions allowed to profile. | 13 | /// Set profiling filter. It specifies descriptions allowed to profile. |
13 | /// This is helpful when call stack has too many nested profiling scopes. | 14 | /// This is helpful when call stack has too many nested profiling scopes. |
@@ -21,7 +22,7 @@ use lazy_static::lazy_static; | |||
21 | /// ``` | 22 | /// ``` |
22 | pub fn set_filter(f: Filter) { | 23 | pub fn set_filter(f: Filter) { |
23 | PROFILING_ENABLED.store(f.depth > 0, Ordering::SeqCst); | 24 | PROFILING_ENABLED.store(f.depth > 0, Ordering::SeqCst); |
24 | let set = HashSet::from_iter(f.allowed.iter().cloned()); | 25 | let set: HashSet<_> = f.allowed.iter().cloned().collect(); |
25 | let mut old = FILTER.write().unwrap(); | 26 | let mut old = FILTER.write().unwrap(); |
26 | let filter_data = FilterData { | 27 | let filter_data = FilterData { |
27 | depth: f.depth, | 28 | depth: f.depth, |
@@ -161,9 +162,7 @@ struct FilterData { | |||
161 | 162 | ||
162 | static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false); | 163 | static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false); |
163 | 164 | ||
164 | lazy_static! { | 165 | static FILTER: Lazy<RwLock<FilterData>> = Lazy::new(Default::default); |
165 | static ref FILTER: RwLock<FilterData> = RwLock::new(Default::default()); | ||
166 | } | ||
167 | 166 | ||
168 | thread_local!(static PROFILE_STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new())); | 167 | thread_local!(static PROFILE_STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new())); |
169 | 168 | ||
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs index 9cbd2c6b8..f3466c585 100644 --- a/crates/ra_syntax/src/ast/extensions.rs +++ b/crates/ra_syntax/src/ast/extensions.rs | |||
@@ -170,6 +170,10 @@ impl ast::ImplBlock { | |||
170 | let second = types.next(); | 170 | let second = types.next(); |
171 | (first, second) | 171 | (first, second) |
172 | } | 172 | } |
173 | |||
174 | pub fn is_negative(&self) -> bool { | ||
175 | self.syntax().children_with_tokens().any(|t| t.kind() == EXCL) | ||
176 | } | ||
173 | } | 177 | } |
174 | 178 | ||
175 | #[derive(Debug, Clone, PartialEq, Eq)] | 179 | #[derive(Debug, Clone, PartialEq, Eq)] |
@@ -348,3 +352,9 @@ impl ast::WherePred { | |||
348 | .find(|it| it.kind() == LIFETIME) | 352 | .find(|it| it.kind() == LIFETIME) |
349 | } | 353 | } |
350 | } | 354 | } |
355 | |||
356 | impl ast::TraitDef { | ||
357 | pub fn is_auto(&self) -> bool { | ||
358 | self.syntax().children_with_tokens().any(|t| t.kind() == AUTO_KW) | ||
359 | } | ||
360 | } | ||
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 4f5165452..65c65d6aa 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -23,7 +23,6 @@ mod syntax_node; | |||
23 | mod syntax_text; | 23 | mod syntax_text; |
24 | mod syntax_error; | 24 | mod syntax_error; |
25 | mod parsing; | 25 | mod parsing; |
26 | mod string_lexing; | ||
27 | mod validation; | 26 | mod validation; |
28 | mod ptr; | 27 | mod ptr; |
29 | 28 | ||
diff --git a/crates/ra_syntax/src/string_lexing.rs b/crates/ra_syntax/src/string_lexing.rs deleted file mode 100644 index 4c3eea3d2..000000000 --- a/crates/ra_syntax/src/string_lexing.rs +++ /dev/null | |||
@@ -1,333 +0,0 @@ | |||
1 | use crate::{TextRange, TextUnit}; | ||
2 | use self::StringComponentKind::*; | ||
3 | |||
4 | #[derive(Debug, Eq, PartialEq, Clone)] | ||
5 | pub(crate) struct StringComponent { | ||
6 | pub(crate) range: TextRange, | ||
7 | pub(crate) kind: StringComponentKind, | ||
8 | } | ||
9 | |||
10 | #[derive(Debug, Eq, PartialEq, Clone)] | ||
11 | pub(crate) enum StringComponentKind { | ||
12 | IgnoreNewline, | ||
13 | CodePoint, | ||
14 | AsciiEscape, | ||
15 | AsciiCodeEscape, | ||
16 | UnicodeEscape, | ||
17 | } | ||
18 | |||
19 | pub(crate) fn parse_quoted_literal( | ||
20 | prefix: Option<char>, | ||
21 | quote: char, | ||
22 | src: &str, | ||
23 | ) -> StringComponentIter { | ||
24 | let prefix = prefix.map(|p| match p { | ||
25 | 'b' => b'b', | ||
26 | _ => panic!("invalid prefix"), | ||
27 | }); | ||
28 | let quote = match quote { | ||
29 | '\'' => b'\'', | ||
30 | '"' => b'"', | ||
31 | _ => panic!("invalid quote"), | ||
32 | }; | ||
33 | StringComponentIter { src, prefix, quote, pos: 0, has_closing_quote: false, suffix: None } | ||
34 | } | ||
35 | |||
36 | pub(crate) struct StringComponentIter<'a> { | ||
37 | src: &'a str, | ||
38 | prefix: Option<u8>, | ||
39 | quote: u8, | ||
40 | pos: usize, | ||
41 | pub(crate) has_closing_quote: bool, | ||
42 | pub(crate) suffix: Option<TextRange>, | ||
43 | } | ||
44 | |||
45 | impl<'a> Iterator for StringComponentIter<'a> { | ||
46 | type Item = StringComponent; | ||
47 | fn next(&mut self) -> Option<StringComponent> { | ||
48 | if self.pos == 0 { | ||
49 | if let Some(prefix) = self.prefix { | ||
50 | assert!( | ||
51 | self.advance() == prefix as char, | ||
52 | "literal should start with a {:?}", | ||
53 | prefix as char, | ||
54 | ); | ||
55 | } | ||
56 | assert!( | ||
57 | self.advance() == self.quote as char, | ||
58 | "literal should start with a {:?}", | ||
59 | self.quote as char, | ||
60 | ); | ||
61 | } | ||
62 | |||
63 | if let Some(component) = self.parse_component() { | ||
64 | return Some(component); | ||
65 | } | ||
66 | |||
67 | // We get here when there are no char components left to parse | ||
68 | if self.peek() == Some(self.quote as char) { | ||
69 | self.advance(); | ||
70 | self.has_closing_quote = true; | ||
71 | if let Some(range) = self.parse_suffix() { | ||
72 | self.suffix = Some(range); | ||
73 | } | ||
74 | } | ||
75 | |||
76 | assert!( | ||
77 | self.peek() == None, | ||
78 | "literal should leave no unparsed input: src = {:?}, pos = {}, length = {}", | ||
79 | self.src, | ||
80 | self.pos, | ||
81 | self.src.len() | ||
82 | ); | ||
83 | |||
84 | None | ||
85 | } | ||
86 | } | ||
87 | |||
88 | impl<'a> StringComponentIter<'a> { | ||
89 | fn peek(&self) -> Option<char> { | ||
90 | if self.pos == self.src.len() { | ||
91 | return None; | ||
92 | } | ||
93 | |||
94 | self.src[self.pos..].chars().next() | ||
95 | } | ||
96 | |||
97 | fn advance(&mut self) -> char { | ||
98 | let next = self.peek().expect("cannot advance if end of input is reached"); | ||
99 | self.pos += next.len_utf8(); | ||
100 | next | ||
101 | } | ||
102 | |||
103 | fn parse_component(&mut self) -> Option<StringComponent> { | ||
104 | let next = self.peek()?; | ||
105 | |||
106 | // Ignore string close | ||
107 | if next == self.quote as char { | ||
108 | return None; | ||
109 | } | ||
110 | |||
111 | let start = self.start_range(); | ||
112 | self.advance(); | ||
113 | |||
114 | if next == '\\' { | ||
115 | // Strings can use `\` to ignore newlines, so we first try to parse one of those | ||
116 | // before falling back to parsing char escapes | ||
117 | if self.quote == b'"' { | ||
118 | if let Some(component) = self.parse_ignore_newline(start) { | ||
119 | return Some(component); | ||
120 | } | ||
121 | } | ||
122 | |||
123 | Some(self.parse_escape(start)) | ||
124 | } else { | ||
125 | Some(self.finish_component(start, CodePoint)) | ||
126 | } | ||
127 | } | ||
128 | |||
129 | fn parse_ignore_newline(&mut self, start: TextUnit) -> Option<StringComponent> { | ||
130 | // In string literals, when a `\` occurs immediately before the newline, the `\`, | ||
131 | // the newline, and all whitespace at the beginning of the next line are ignored | ||
132 | match self.peek() { | ||
133 | Some('\n') | Some('\r') => { | ||
134 | self.skip_whitespace(); | ||
135 | Some(self.finish_component(start, IgnoreNewline)) | ||
136 | } | ||
137 | _ => None, | ||
138 | } | ||
139 | } | ||
140 | |||
141 | fn skip_whitespace(&mut self) { | ||
142 | while self.peek().map(|c| c.is_whitespace()) == Some(true) { | ||
143 | self.advance(); | ||
144 | } | ||
145 | } | ||
146 | |||
147 | fn parse_escape(&mut self, start: TextUnit) -> StringComponent { | ||
148 | if self.peek().is_none() { | ||
149 | return self.finish_component(start, AsciiEscape); | ||
150 | } | ||
151 | |||
152 | let next = self.advance(); | ||
153 | match next { | ||
154 | 'x' => self.parse_ascii_code_escape(start), | ||
155 | 'u' => self.parse_unicode_escape(start), | ||
156 | _ => self.finish_component(start, AsciiEscape), | ||
157 | } | ||
158 | } | ||
159 | |||
160 | fn parse_unicode_escape(&mut self, start: TextUnit) -> StringComponent { | ||
161 | match self.peek() { | ||
162 | Some('{') => { | ||
163 | self.advance(); | ||
164 | |||
165 | // Parse anything until we reach `}` | ||
166 | while let Some(next) = self.peek() { | ||
167 | self.advance(); | ||
168 | if next == '}' { | ||
169 | break; | ||
170 | } | ||
171 | } | ||
172 | |||
173 | self.finish_component(start, UnicodeEscape) | ||
174 | } | ||
175 | Some(_) | None => self.finish_component(start, UnicodeEscape), | ||
176 | } | ||
177 | } | ||
178 | |||
179 | fn parse_ascii_code_escape(&mut self, start: TextUnit) -> StringComponent { | ||
180 | let code_start = self.pos; | ||
181 | while let Some(next) = self.peek() { | ||
182 | if next == '\'' || (self.pos - code_start == 2) { | ||
183 | break; | ||
184 | } | ||
185 | |||
186 | self.advance(); | ||
187 | } | ||
188 | self.finish_component(start, AsciiCodeEscape) | ||
189 | } | ||
190 | |||
191 | fn parse_suffix(&mut self) -> Option<TextRange> { | ||
192 | let start = self.start_range(); | ||
193 | let _ = self.peek()?; | ||
194 | while let Some(_) = self.peek() { | ||
195 | self.advance(); | ||
196 | } | ||
197 | Some(self.finish_range(start)) | ||
198 | } | ||
199 | |||
200 | fn start_range(&self) -> TextUnit { | ||
201 | TextUnit::from_usize(self.pos) | ||
202 | } | ||
203 | |||
204 | fn finish_range(&self, start: TextUnit) -> TextRange { | ||
205 | TextRange::from_to(start, TextUnit::from_usize(self.pos)) | ||
206 | } | ||
207 | |||
208 | fn finish_component(&self, start: TextUnit, kind: StringComponentKind) -> StringComponent { | ||
209 | let range = self.finish_range(start); | ||
210 | StringComponent { range, kind } | ||
211 | } | ||
212 | } | ||
213 | |||
214 | #[cfg(test)] | ||
215 | mod tests { | ||
216 | use super::*; | ||
217 | |||
218 | fn parse(src: &str) -> (bool, Vec<StringComponent>) { | ||
219 | let component_iterator = &mut parse_quoted_literal(None, '\'', src); | ||
220 | let components: Vec<_> = component_iterator.collect(); | ||
221 | (component_iterator.has_closing_quote, components) | ||
222 | } | ||
223 | |||
224 | fn unclosed_char_component(src: &str) -> StringComponent { | ||
225 | let (has_closing_quote, components) = parse(src); | ||
226 | assert!(!has_closing_quote, "char should not have closing quote"); | ||
227 | assert!(components.len() == 1); | ||
228 | components[0].clone() | ||
229 | } | ||
230 | |||
231 | fn closed_char_component(src: &str) -> StringComponent { | ||
232 | let (has_closing_quote, components) = parse(src); | ||
233 | assert!(has_closing_quote, "char should have closing quote"); | ||
234 | assert!(components.len() == 1, "Literal: {}\nComponents: {:#?}", src, components); | ||
235 | components[0].clone() | ||
236 | } | ||
237 | |||
238 | fn closed_char_components(src: &str) -> Vec<StringComponent> { | ||
239 | let (has_closing_quote, components) = parse(src); | ||
240 | assert!(has_closing_quote, "char should have closing quote"); | ||
241 | components | ||
242 | } | ||
243 | |||
244 | fn range_closed(src: &str) -> TextRange { | ||
245 | TextRange::from_to(1.into(), (src.len() as u32 - 1).into()) | ||
246 | } | ||
247 | |||
248 | fn range_unclosed(src: &str) -> TextRange { | ||
249 | TextRange::from_to(1.into(), (src.len() as u32).into()) | ||
250 | } | ||
251 | |||
252 | #[test] | ||
253 | fn test_unicode_escapes() { | ||
254 | let unicode_escapes = &[r"{DEAD}", "{BEEF}", "{FF}", "{}", ""]; | ||
255 | for escape in unicode_escapes { | ||
256 | let escape_sequence = format!(r"'\u{}'", escape); | ||
257 | let component = closed_char_component(&escape_sequence); | ||
258 | let expected_range = range_closed(&escape_sequence); | ||
259 | assert_eq!(component.kind, UnicodeEscape); | ||
260 | assert_eq!(component.range, expected_range); | ||
261 | } | ||
262 | } | ||
263 | |||
264 | #[test] | ||
265 | fn test_unicode_escapes_unclosed() { | ||
266 | let unicode_escapes = &["{DEAD", "{BEEF", "{FF"]; | ||
267 | for escape in unicode_escapes { | ||
268 | let escape_sequence = format!(r"'\u{}'", escape); | ||
269 | let component = unclosed_char_component(&escape_sequence); | ||
270 | let expected_range = range_unclosed(&escape_sequence); | ||
271 | assert_eq!(component.kind, UnicodeEscape); | ||
272 | assert_eq!(component.range, expected_range); | ||
273 | } | ||
274 | } | ||
275 | |||
276 | #[test] | ||
277 | fn test_empty_char() { | ||
278 | let (has_closing_quote, components) = parse("''"); | ||
279 | assert!(has_closing_quote, "char should have closing quote"); | ||
280 | assert!(components.len() == 0); | ||
281 | } | ||
282 | |||
283 | #[test] | ||
284 | fn test_unclosed_char() { | ||
285 | let component = unclosed_char_component("'a"); | ||
286 | assert!(component.kind == CodePoint); | ||
287 | assert!(component.range == TextRange::from_to(1.into(), 2.into())); | ||
288 | } | ||
289 | |||
290 | #[test] | ||
291 | fn test_digit_escapes() { | ||
292 | let literals = &[r"", r"5", r"55"]; | ||
293 | |||
294 | for literal in literals { | ||
295 | let lit_text = format!(r"'\x{}'", literal); | ||
296 | let component = closed_char_component(&lit_text); | ||
297 | assert!(component.kind == AsciiCodeEscape); | ||
298 | assert!(component.range == range_closed(&lit_text)); | ||
299 | } | ||
300 | |||
301 | // More than 2 digits starts a new codepoint | ||
302 | let components = closed_char_components(r"'\x555'"); | ||
303 | assert!(components.len() == 2); | ||
304 | assert!(components[1].kind == CodePoint); | ||
305 | } | ||
306 | |||
307 | #[test] | ||
308 | fn test_ascii_escapes() { | ||
309 | let literals = &[ | ||
310 | r"\'", "\\\"", // equivalent to \" | ||
311 | r"\n", r"\r", r"\t", r"\\", r"\0", | ||
312 | ]; | ||
313 | |||
314 | for literal in literals { | ||
315 | let lit_text = format!("'{}'", literal); | ||
316 | let component = closed_char_component(&lit_text); | ||
317 | assert!(component.kind == AsciiEscape); | ||
318 | assert!(component.range == range_closed(&lit_text)); | ||
319 | } | ||
320 | } | ||
321 | |||
322 | #[test] | ||
323 | fn test_no_escapes() { | ||
324 | let literals = &['"', 'n', 'r', 't', '0', 'x', 'u']; | ||
325 | |||
326 | for &literal in literals { | ||
327 | let lit_text = format!("'{}'", literal); | ||
328 | let component = closed_char_component(&lit_text); | ||
329 | assert!(component.kind == CodePoint); | ||
330 | assert!(component.range == range_closed(&lit_text)); | ||
331 | } | ||
332 | } | ||
333 | } | ||
diff --git a/crates/ra_syntax/src/syntax_error.rs b/crates/ra_syntax/src/syntax_error.rs index 4198eefdb..27e12293b 100644 --- a/crates/ra_syntax/src/syntax_error.rs +++ b/crates/ra_syntax/src/syntax_error.rs | |||
@@ -2,7 +2,10 @@ use std::fmt; | |||
2 | 2 | ||
3 | use ra_parser::ParseError; | 3 | use ra_parser::ParseError; |
4 | 4 | ||
5 | use crate::{TextRange, TextUnit}; | 5 | use crate::{ |
6 | TextRange, TextUnit, | ||
7 | validation::EscapeError, | ||
8 | }; | ||
6 | 9 | ||
7 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 10 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
8 | pub struct SyntaxError { | 11 | pub struct SyntaxError { |
@@ -67,32 +70,7 @@ impl fmt::Display for SyntaxError { | |||
67 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 70 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
68 | pub enum SyntaxErrorKind { | 71 | pub enum SyntaxErrorKind { |
69 | ParseError(ParseError), | 72 | ParseError(ParseError), |
70 | UnescapedCodepoint, | 73 | EscapeError(EscapeError), |
71 | EmptyChar, | ||
72 | UnclosedChar, | ||
73 | OverlongChar, | ||
74 | EmptyByte, | ||
75 | UnclosedByte, | ||
76 | OverlongByte, | ||
77 | ByteOutOfRange, | ||
78 | UnescapedByte, | ||
79 | EmptyByteEscape, | ||
80 | InvalidByteEscape, | ||
81 | TooShortByteCodeEscape, | ||
82 | MalformedByteCodeEscape, | ||
83 | UnicodeEscapeForbidden, | ||
84 | EmptyAsciiEscape, | ||
85 | InvalidAsciiEscape, | ||
86 | TooShortAsciiCodeEscape, | ||
87 | AsciiCodeEscapeOutOfRange, | ||
88 | MalformedAsciiCodeEscape, | ||
89 | UnclosedUnicodeEscape, | ||
90 | MalformedUnicodeEscape, | ||
91 | EmptyUnicodeEcape, | ||
92 | OverlongUnicodeEscape, | ||
93 | UnicodeEscapeOutOfRange, | ||
94 | UnclosedString, | ||
95 | InvalidSuffix, | ||
96 | InvalidBlockAttr, | 74 | InvalidBlockAttr, |
97 | InvalidMatchInnerAttr, | 75 | InvalidMatchInnerAttr, |
98 | InvalidTupleIndexFormat, | 76 | InvalidTupleIndexFormat, |
@@ -102,38 +80,6 @@ impl fmt::Display for SyntaxErrorKind { | |||
102 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | 80 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
103 | use self::SyntaxErrorKind::*; | 81 | use self::SyntaxErrorKind::*; |
104 | match self { | 82 | match self { |
105 | UnescapedCodepoint => write!(f, "This codepoint should always be escaped"), | ||
106 | EmptyAsciiEscape => write!(f, "Empty escape sequence"), | ||
107 | InvalidAsciiEscape => write!(f, "Invalid escape sequence"), | ||
108 | EmptyChar => write!(f, "Empty char literal"), | ||
109 | UnclosedChar => write!(f, "Unclosed char literal"), | ||
110 | OverlongChar => write!(f, "Char literal should be one character long"), | ||
111 | EmptyByte => write!(f, "Empty byte literal"), | ||
112 | UnclosedByte => write!(f, "Unclosed byte literal"), | ||
113 | OverlongByte => write!(f, "Byte literal should be one character long"), | ||
114 | ByteOutOfRange => write!(f, "Byte should be a valid ASCII character"), | ||
115 | UnescapedByte => write!(f, "This byte should always be escaped"), | ||
116 | EmptyByteEscape => write!(f, "Empty escape sequence"), | ||
117 | InvalidByteEscape => write!(f, "Invalid escape sequence"), | ||
118 | TooShortByteCodeEscape => write!(f, "Escape sequence should have two digits"), | ||
119 | MalformedByteCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"), | ||
120 | UnicodeEscapeForbidden => { | ||
121 | write!(f, "Unicode escapes are not allowed in byte literals or byte strings") | ||
122 | } | ||
123 | TooShortAsciiCodeEscape => write!(f, "Escape sequence should have two digits"), | ||
124 | AsciiCodeEscapeOutOfRange => { | ||
125 | write!(f, "Escape sequence should be between \\x00 and \\x7F") | ||
126 | } | ||
127 | MalformedAsciiCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"), | ||
128 | UnclosedUnicodeEscape => write!(f, "Missing `}}`"), | ||
129 | MalformedUnicodeEscape => write!(f, "Malformed unicode escape sequence"), | ||
130 | EmptyUnicodeEcape => write!(f, "Empty unicode escape sequence"), | ||
131 | OverlongUnicodeEscape => { | ||
132 | write!(f, "Unicode escape sequence should have at most 6 digits") | ||
133 | } | ||
134 | UnicodeEscapeOutOfRange => write!(f, "Unicode escape code should be at most 0x10FFFF"), | ||
135 | UnclosedString => write!(f, "Unclosed string literal"), | ||
136 | InvalidSuffix => write!(f, "Invalid literal suffix"), | ||
137 | InvalidBlockAttr => { | 83 | InvalidBlockAttr => { |
138 | write!(f, "A block in this position cannot accept inner attributes") | 84 | write!(f, "A block in this position cannot accept inner attributes") |
139 | } | 85 | } |
@@ -144,6 +90,46 @@ impl fmt::Display for SyntaxErrorKind { | |||
144 | write!(f, "Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix") | 90 | write!(f, "Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix") |
145 | } | 91 | } |
146 | ParseError(msg) => write!(f, "{}", msg.0), | 92 | ParseError(msg) => write!(f, "{}", msg.0), |
93 | EscapeError(err) => write!(f, "{}", err), | ||
147 | } | 94 | } |
148 | } | 95 | } |
149 | } | 96 | } |
97 | |||
98 | impl fmt::Display for EscapeError { | ||
99 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
100 | let msg = match self { | ||
101 | EscapeError::ZeroChars => "Empty literal", | ||
102 | EscapeError::MoreThanOneChar => "Literal should be one character long", | ||
103 | EscapeError::LoneSlash => "Character must be escaped: '\\'", | ||
104 | EscapeError::InvalidEscape => "Invalid escape sequence", | ||
105 | EscapeError::BareCarriageReturn => "Character must be escaped: '\r'", | ||
106 | EscapeError::EscapeOnlyChar => "Character must be escaped", | ||
107 | EscapeError::TooShortHexEscape => "Escape sequence should have two digits", | ||
108 | EscapeError::InvalidCharInHexEscape => "Escape sequence should be a hexadecimal number", | ||
109 | EscapeError::OutOfRangeHexEscape => "Escape sequence should be ASCII", | ||
110 | EscapeError::NoBraceInUnicodeEscape => "Invalid escape sequence", | ||
111 | EscapeError::InvalidCharInUnicodeEscape => "Invalid escape sequence", | ||
112 | EscapeError::EmptyUnicodeEscape => "Invalid escape sequence", | ||
113 | EscapeError::UnclosedUnicodeEscape => "Missing '}'", | ||
114 | EscapeError::LeadingUnderscoreUnicodeEscape => "Invalid escape sequence", | ||
115 | EscapeError::OverlongUnicodeEscape => { | ||
116 | "Unicode escape sequence should have at most 6 digits" | ||
117 | } | ||
118 | EscapeError::LoneSurrogateUnicodeEscape => { | ||
119 | "Unicode escape code should not be a surrogate" | ||
120 | } | ||
121 | EscapeError::OutOfRangeUnicodeEscape => { | ||
122 | "Unicode escape code should be at most 0x10FFFF" | ||
123 | } | ||
124 | EscapeError::UnicodeEscapeInByte => "Unicode escapes are not allowed in bytes", | ||
125 | EscapeError::NonAsciiCharInByte => "Non ASCII characters are not allowed in bytes", | ||
126 | }; | ||
127 | write!(f, "{}", msg) | ||
128 | } | ||
129 | } | ||
130 | |||
131 | impl From<EscapeError> for SyntaxErrorKind { | ||
132 | fn from(err: EscapeError) -> Self { | ||
133 | SyntaxErrorKind::EscapeError(err) | ||
134 | } | ||
135 | } | ||
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index c2f545173..11a1fb4a7 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -1,17 +1,17 @@ | |||
1 | mod byte; | 1 | mod unescape; |
2 | mod byte_string; | 2 | |
3 | mod char; | ||
4 | mod string; | ||
5 | mod block; | 3 | mod block; |
6 | mod field_expr; | 4 | mod field_expr; |
7 | 5 | ||
8 | use crate::{ | 6 | use crate::{ |
9 | SourceFile, SyntaxError, AstNode, SyntaxNode, | 7 | SourceFile, SyntaxError, AstNode, SyntaxNode, TextUnit, |
10 | SyntaxKind::{L_CURLY, R_CURLY, BYTE, BYTE_STRING, STRING, CHAR}, | 8 | SyntaxKind::{L_CURLY, R_CURLY, BYTE, BYTE_STRING, STRING, CHAR}, |
11 | ast, | 9 | ast, |
12 | algo::visit::{visitor_ctx, VisitorCtx}, | 10 | algo::visit::{visitor_ctx, VisitorCtx}, |
13 | }; | 11 | }; |
14 | 12 | ||
13 | pub(crate) use unescape::EscapeError; | ||
14 | |||
15 | pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> { | 15 | pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> { |
16 | let mut errors = Vec::new(); | 16 | let mut errors = Vec::new(); |
17 | for node in file.syntax().descendants() { | 17 | for node in file.syntax().descendants() { |
@@ -26,11 +26,55 @@ pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> { | |||
26 | 26 | ||
27 | // FIXME: kill duplication | 27 | // FIXME: kill duplication |
28 | fn validate_literal(literal: &ast::Literal, acc: &mut Vec<SyntaxError>) { | 28 | fn validate_literal(literal: &ast::Literal, acc: &mut Vec<SyntaxError>) { |
29 | match literal.token().kind() { | 29 | let token = literal.token(); |
30 | BYTE => byte::validate_byte_node(literal.token(), acc), | 30 | let text = token.text().as_str(); |
31 | BYTE_STRING => byte_string::validate_byte_string_node(literal.token(), acc), | 31 | match token.kind() { |
32 | STRING => string::validate_string_node(literal.token(), acc), | 32 | BYTE => { |
33 | CHAR => char::validate_char_node(literal.token(), acc), | 33 | if let Some(end) = text.rfind('\'') { |
34 | if let Some(without_quotes) = text.get(2..end) { | ||
35 | if let Err((off, err)) = unescape::unescape_byte(without_quotes) { | ||
36 | let off = token.range().start() + TextUnit::from_usize(off + 2); | ||
37 | acc.push(SyntaxError::new(err.into(), off)) | ||
38 | } | ||
39 | } | ||
40 | } | ||
41 | } | ||
42 | CHAR => { | ||
43 | if let Some(end) = text.rfind('\'') { | ||
44 | if let Some(without_quotes) = text.get(1..end) { | ||
45 | if let Err((off, err)) = unescape::unescape_char(without_quotes) { | ||
46 | let off = token.range().start() + TextUnit::from_usize(off + 1); | ||
47 | acc.push(SyntaxError::new(err.into(), off)) | ||
48 | } | ||
49 | } | ||
50 | } | ||
51 | } | ||
52 | BYTE_STRING => { | ||
53 | if let Some(end) = text.rfind('\"') { | ||
54 | if let Some(without_quotes) = text.get(2..end) { | ||
55 | unescape::unescape_byte_str(without_quotes, &mut |range, char| { | ||
56 | if let Err(err) = char { | ||
57 | let off = range.start; | ||
58 | let off = token.range().start() + TextUnit::from_usize(off + 2); | ||
59 | acc.push(SyntaxError::new(err.into(), off)) | ||
60 | } | ||
61 | }) | ||
62 | } | ||
63 | } | ||
64 | } | ||
65 | STRING => { | ||
66 | if let Some(end) = text.rfind('\"') { | ||
67 | if let Some(without_quotes) = text.get(1..end) { | ||
68 | unescape::unescape_str(without_quotes, &mut |range, char| { | ||
69 | if let Err(err) = char { | ||
70 | let off = range.start; | ||
71 | let off = token.range().start() + TextUnit::from_usize(off + 1); | ||
72 | acc.push(SyntaxError::new(err.into(), off)) | ||
73 | } | ||
74 | }) | ||
75 | } | ||
76 | } | ||
77 | } | ||
34 | _ => (), | 78 | _ => (), |
35 | } | 79 | } |
36 | } | 80 | } |
diff --git a/crates/ra_syntax/src/validation/byte.rs b/crates/ra_syntax/src/validation/byte.rs deleted file mode 100644 index f653e65d0..000000000 --- a/crates/ra_syntax/src/validation/byte.rs +++ /dev/null | |||
@@ -1,199 +0,0 @@ | |||
1 | //! Validation of byte literals | ||
2 | |||
3 | use crate::{ | ||
4 | string_lexing::{self, StringComponentKind}, | ||
5 | TextRange, | ||
6 | validation::char, | ||
7 | SyntaxError, | ||
8 | SyntaxErrorKind::*, | ||
9 | SyntaxToken, | ||
10 | }; | ||
11 | |||
12 | pub(super) fn validate_byte_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { | ||
13 | let literal_text = node.text(); | ||
14 | let literal_range = node.range(); | ||
15 | let mut components = string_lexing::parse_quoted_literal(Some('b'), '\'', literal_text); | ||
16 | let mut len = 0; | ||
17 | for component in &mut components { | ||
18 | len += 1; | ||
19 | let text = &literal_text[component.range]; | ||
20 | let range = component.range + literal_range.start(); | ||
21 | validate_byte_component(text, component.kind, range, errors); | ||
22 | } | ||
23 | |||
24 | if !components.has_closing_quote { | ||
25 | errors.push(SyntaxError::new(UnclosedByte, literal_range)); | ||
26 | } | ||
27 | |||
28 | if let Some(range) = components.suffix { | ||
29 | errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start())); | ||
30 | } | ||
31 | |||
32 | if len == 0 { | ||
33 | errors.push(SyntaxError::new(EmptyByte, literal_range)); | ||
34 | } | ||
35 | |||
36 | if len > 1 { | ||
37 | errors.push(SyntaxError::new(OverlongByte, literal_range)); | ||
38 | } | ||
39 | } | ||
40 | |||
41 | pub(super) fn validate_byte_component( | ||
42 | text: &str, | ||
43 | kind: StringComponentKind, | ||
44 | range: TextRange, | ||
45 | errors: &mut Vec<SyntaxError>, | ||
46 | ) { | ||
47 | use self::StringComponentKind::*; | ||
48 | match kind { | ||
49 | AsciiEscape => validate_byte_escape(text, range, errors), | ||
50 | AsciiCodeEscape => validate_byte_code_escape(text, range, errors), | ||
51 | UnicodeEscape => errors.push(SyntaxError::new(UnicodeEscapeForbidden, range)), | ||
52 | CodePoint => { | ||
53 | let c = text.chars().next().expect("Code points should be one character long"); | ||
54 | |||
55 | // These bytes must always be escaped | ||
56 | if c == '\t' || c == '\r' || c == '\n' { | ||
57 | errors.push(SyntaxError::new(UnescapedByte, range)); | ||
58 | } | ||
59 | |||
60 | // Only ASCII bytes are allowed | ||
61 | if c > 0x7F as char { | ||
62 | errors.push(SyntaxError::new(ByteOutOfRange, range)); | ||
63 | } | ||
64 | } | ||
65 | IgnoreNewline => { /* always valid */ } | ||
66 | } | ||
67 | } | ||
68 | |||
69 | fn validate_byte_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) { | ||
70 | if text.len() == 1 { | ||
71 | // Escape sequence consists only of leading `\` | ||
72 | errors.push(SyntaxError::new(EmptyByteEscape, range)); | ||
73 | } else { | ||
74 | let escape_code = text.chars().skip(1).next().unwrap(); | ||
75 | if !char::is_ascii_escape(escape_code) { | ||
76 | errors.push(SyntaxError::new(InvalidByteEscape, range)); | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | |||
81 | fn validate_byte_code_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) { | ||
82 | // A ByteCodeEscape has 4 chars, example: `\xDD` | ||
83 | if !text.is_ascii() { | ||
84 | errors.push(SyntaxError::new(MalformedByteCodeEscape, range)); | ||
85 | } else if text.chars().count() < 4 { | ||
86 | errors.push(SyntaxError::new(TooShortByteCodeEscape, range)); | ||
87 | } else { | ||
88 | assert!(text.chars().count() == 4, "ByteCodeEscape cannot be longer than 4 chars"); | ||
89 | |||
90 | if u8::from_str_radix(&text[2..], 16).is_err() { | ||
91 | errors.push(SyntaxError::new(MalformedByteCodeEscape, range)); | ||
92 | } | ||
93 | } | ||
94 | } | ||
95 | |||
96 | #[cfg(test)] | ||
97 | mod test { | ||
98 | use crate::{SourceFile, TreeArc}; | ||
99 | |||
100 | fn build_file(literal: &str) -> TreeArc<SourceFile> { | ||
101 | let src = format!("const C: u8 = b'{}';", literal); | ||
102 | SourceFile::parse(&src) | ||
103 | } | ||
104 | |||
105 | fn assert_valid_byte(literal: &str) { | ||
106 | let file = build_file(literal); | ||
107 | assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors()); | ||
108 | } | ||
109 | |||
110 | fn assert_invalid_byte(literal: &str) { | ||
111 | let file = build_file(literal); | ||
112 | assert!(file.errors().len() > 0); | ||
113 | } | ||
114 | |||
115 | #[test] | ||
116 | fn test_ansi_codepoints() { | ||
117 | for byte in 0..128 { | ||
118 | match byte { | ||
119 | b'\n' | b'\r' | b'\t' => assert_invalid_byte(&(byte as char).to_string()), | ||
120 | b'\'' | b'\\' => { /* Ignore character close and backslash */ } | ||
121 | _ => assert_valid_byte(&(byte as char).to_string()), | ||
122 | } | ||
123 | } | ||
124 | |||
125 | for byte in 128..=255u8 { | ||
126 | assert_invalid_byte(&(byte as char).to_string()); | ||
127 | } | ||
128 | } | ||
129 | |||
130 | #[test] | ||
131 | fn test_unicode_codepoints() { | ||
132 | let invalid = ["Ƒ", "バ", "メ", "﷽"]; | ||
133 | for c in &invalid { | ||
134 | assert_invalid_byte(c); | ||
135 | } | ||
136 | } | ||
137 | |||
138 | #[test] | ||
139 | fn test_unicode_multiple_codepoints() { | ||
140 | let invalid = ["नी", "👨👨"]; | ||
141 | for c in &invalid { | ||
142 | assert_invalid_byte(c); | ||
143 | } | ||
144 | } | ||
145 | |||
146 | #[test] | ||
147 | fn test_valid_byte_escape() { | ||
148 | let valid = [r"\'", "\"", "\\\\", "\\\"", r"\n", r"\r", r"\t", r"\0"]; | ||
149 | for c in &valid { | ||
150 | assert_valid_byte(c); | ||
151 | } | ||
152 | } | ||
153 | |||
154 | #[test] | ||
155 | fn test_invalid_byte_escape() { | ||
156 | let invalid = [r"\a", r"\?", r"\"]; | ||
157 | for c in &invalid { | ||
158 | assert_invalid_byte(c); | ||
159 | } | ||
160 | } | ||
161 | |||
162 | #[test] | ||
163 | fn test_valid_byte_code_escape() { | ||
164 | let valid = [r"\x00", r"\x7F", r"\x55", r"\xF0"]; | ||
165 | for c in &valid { | ||
166 | assert_valid_byte(c); | ||
167 | } | ||
168 | } | ||
169 | |||
170 | #[test] | ||
171 | fn test_invalid_byte_code_escape() { | ||
172 | let invalid = [r"\x", r"\x7"]; | ||
173 | for c in &invalid { | ||
174 | assert_invalid_byte(c); | ||
175 | } | ||
176 | } | ||
177 | |||
178 | #[test] | ||
179 | fn test_invalid_unicode_escape() { | ||
180 | let well_formed = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"]; | ||
181 | for c in &well_formed { | ||
182 | assert_invalid_byte(c); | ||
183 | } | ||
184 | |||
185 | let invalid = [ | ||
186 | r"\u", | ||
187 | r"\u{}", | ||
188 | r"\u{", | ||
189 | r"\u{FF", | ||
190 | r"\u{FFFFFF}", | ||
191 | r"\u{_F}", | ||
192 | r"\u{00FFFFF}", | ||
193 | r"\u{110000}", | ||
194 | ]; | ||
195 | for c in &invalid { | ||
196 | assert_invalid_byte(c); | ||
197 | } | ||
198 | } | ||
199 | } | ||
diff --git a/crates/ra_syntax/src/validation/byte_string.rs b/crates/ra_syntax/src/validation/byte_string.rs deleted file mode 100644 index 1d48c2d9b..000000000 --- a/crates/ra_syntax/src/validation/byte_string.rs +++ /dev/null | |||
@@ -1,169 +0,0 @@ | |||
1 | use crate::{ | ||
2 | string_lexing::{self, StringComponentKind}, | ||
3 | SyntaxError, | ||
4 | SyntaxErrorKind::*, | ||
5 | SyntaxToken, | ||
6 | }; | ||
7 | |||
8 | use super::byte; | ||
9 | |||
10 | pub(crate) fn validate_byte_string_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { | ||
11 | let literal_text = node.text(); | ||
12 | let literal_range = node.range(); | ||
13 | let mut components = string_lexing::parse_quoted_literal(Some('b'), '"', literal_text); | ||
14 | for component in &mut components { | ||
15 | let range = component.range + literal_range.start(); | ||
16 | |||
17 | match component.kind { | ||
18 | StringComponentKind::IgnoreNewline => { /* always valid */ } | ||
19 | _ => { | ||
20 | // Chars must escape \t, \n and \r codepoints, but strings don't | ||
21 | let text = &literal_text[component.range]; | ||
22 | match text { | ||
23 | "\t" | "\n" | "\r" => { /* always valid */ } | ||
24 | _ => byte::validate_byte_component(text, component.kind, range, errors), | ||
25 | } | ||
26 | } | ||
27 | } | ||
28 | } | ||
29 | |||
30 | if !components.has_closing_quote { | ||
31 | errors.push(SyntaxError::new(UnclosedString, literal_range)); | ||
32 | } | ||
33 | |||
34 | if let Some(range) = components.suffix { | ||
35 | errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start())); | ||
36 | } | ||
37 | } | ||
38 | |||
39 | #[cfg(test)] | ||
40 | mod test { | ||
41 | use crate::{SourceFile, TreeArc}; | ||
42 | |||
43 | fn build_file(literal: &str) -> TreeArc<SourceFile> { | ||
44 | let src = format!(r#"const S: &'static [u8] = b"{}";"#, literal); | ||
45 | println!("Source: {}", src); | ||
46 | SourceFile::parse(&src) | ||
47 | } | ||
48 | |||
49 | fn assert_valid_str(literal: &str) { | ||
50 | let file = build_file(literal); | ||
51 | assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors()); | ||
52 | } | ||
53 | |||
54 | fn assert_invalid_str(literal: &str) { | ||
55 | let file = build_file(literal); | ||
56 | assert!(file.errors().len() > 0); | ||
57 | } | ||
58 | |||
59 | #[test] | ||
60 | fn test_ansi_codepoints() { | ||
61 | for byte in 0..128 { | ||
62 | match byte { | ||
63 | b'\"' | b'\\' => { /* Ignore string close and backslash */ } | ||
64 | _ => assert_valid_str(&(byte as char).to_string()), | ||
65 | } | ||
66 | } | ||
67 | |||
68 | for byte in 128..=255u8 { | ||
69 | assert_invalid_str(&(byte as char).to_string()); | ||
70 | } | ||
71 | } | ||
72 | |||
73 | #[test] | ||
74 | fn test_unicode_codepoints() { | ||
75 | let invalid = ["Ƒ", "バ", "メ", "﷽"]; | ||
76 | for c in &invalid { | ||
77 | assert_invalid_str(c); | ||
78 | } | ||
79 | } | ||
80 | |||
81 | #[test] | ||
82 | fn test_unicode_multiple_codepoints() { | ||
83 | let invalid = ["नी", "👨👨"]; | ||
84 | for c in &invalid { | ||
85 | assert_invalid_str(c); | ||
86 | } | ||
87 | } | ||
88 | |||
89 | #[test] | ||
90 | fn test_valid_ascii_escape() { | ||
91 | let valid = [r"\'", r#"\""#, r"\\", r"\n", r"\r", r"\t", r"\0", "a", "b"]; | ||
92 | for c in &valid { | ||
93 | assert_valid_str(c); | ||
94 | } | ||
95 | } | ||
96 | |||
97 | #[test] | ||
98 | fn test_invalid_ascii_escape() { | ||
99 | let invalid = [r"\a", r"\?", r"\"]; | ||
100 | for c in &invalid { | ||
101 | assert_invalid_str(c); | ||
102 | } | ||
103 | } | ||
104 | |||
105 | #[test] | ||
106 | fn test_valid_ascii_code_escape() { | ||
107 | let valid = [r"\x00", r"\x7F", r"\x55", r"\xF0"]; | ||
108 | for c in &valid { | ||
109 | assert_valid_str(c); | ||
110 | } | ||
111 | } | ||
112 | |||
113 | #[test] | ||
114 | fn test_invalid_ascii_code_escape() { | ||
115 | let invalid = [r"\x", r"\x7"]; | ||
116 | for c in &invalid { | ||
117 | assert_invalid_str(c); | ||
118 | } | ||
119 | } | ||
120 | |||
121 | #[test] | ||
122 | fn test_invalid_unicode_escape() { | ||
123 | let well_formed = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"]; | ||
124 | for c in &well_formed { | ||
125 | assert_invalid_str(c); | ||
126 | } | ||
127 | |||
128 | let invalid = [ | ||
129 | r"\u", | ||
130 | r"\u{}", | ||
131 | r"\u{", | ||
132 | r"\u{FF", | ||
133 | r"\u{FFFFFF}", | ||
134 | r"\u{_F}", | ||
135 | r"\u{00FFFFF}", | ||
136 | r"\u{110000}", | ||
137 | ]; | ||
138 | for c in &invalid { | ||
139 | assert_invalid_str(c); | ||
140 | } | ||
141 | } | ||
142 | |||
143 | #[test] | ||
144 | fn test_mixed_invalid() { | ||
145 | assert_invalid_str( | ||
146 | r"This is the tale of a string | ||
147 | with a newline in between, some emoji (👨👨) here and there, | ||
148 | unicode escapes like this: \u{1FFBB} and weird stuff like | ||
149 | this ﷽", | ||
150 | ); | ||
151 | } | ||
152 | |||
153 | #[test] | ||
154 | fn test_mixed_valid() { | ||
155 | assert_valid_str( | ||
156 | r"This is the tale of a string | ||
157 | with a newline in between, no emoji at all, | ||
158 | nor unicode escapes or weird stuff", | ||
159 | ); | ||
160 | } | ||
161 | |||
162 | #[test] | ||
163 | fn test_ignore_newline() { | ||
164 | assert_valid_str( | ||
165 | "Hello \ | ||
166 | World", | ||
167 | ); | ||
168 | } | ||
169 | } | ||
diff --git a/crates/ra_syntax/src/validation/char.rs b/crates/ra_syntax/src/validation/char.rs deleted file mode 100644 index 0f1885873..000000000 --- a/crates/ra_syntax/src/validation/char.rs +++ /dev/null | |||
@@ -1,273 +0,0 @@ | |||
1 | //! Validation of char literals | ||
2 | |||
3 | use std::u32; | ||
4 | |||
5 | use arrayvec::ArrayString; | ||
6 | |||
7 | use crate::{ | ||
8 | string_lexing::{self, StringComponentKind}, | ||
9 | TextRange, | ||
10 | SyntaxError, | ||
11 | SyntaxErrorKind::*, | ||
12 | SyntaxToken, | ||
13 | }; | ||
14 | |||
15 | pub(super) fn validate_char_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { | ||
16 | let literal_text = node.text(); | ||
17 | let literal_range = node.range(); | ||
18 | let mut components = string_lexing::parse_quoted_literal(None, '\'', literal_text); | ||
19 | let mut len = 0; | ||
20 | for component in &mut components { | ||
21 | len += 1; | ||
22 | let text = &literal_text[component.range]; | ||
23 | let range = component.range + literal_range.start(); | ||
24 | validate_char_component(text, component.kind, range, errors); | ||
25 | } | ||
26 | |||
27 | if !components.has_closing_quote { | ||
28 | errors.push(SyntaxError::new(UnclosedChar, literal_range)); | ||
29 | } | ||
30 | |||
31 | if let Some(range) = components.suffix { | ||
32 | errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start())); | ||
33 | } | ||
34 | |||
35 | if len == 0 { | ||
36 | errors.push(SyntaxError::new(EmptyChar, literal_range)); | ||
37 | } | ||
38 | |||
39 | if len > 1 { | ||
40 | errors.push(SyntaxError::new(OverlongChar, literal_range)); | ||
41 | } | ||
42 | } | ||
43 | |||
44 | pub(super) fn validate_char_component( | ||
45 | text: &str, | ||
46 | kind: StringComponentKind, | ||
47 | range: TextRange, | ||
48 | errors: &mut Vec<SyntaxError>, | ||
49 | ) { | ||
50 | // Validate escapes | ||
51 | use self::StringComponentKind::*; | ||
52 | match kind { | ||
53 | AsciiEscape => validate_ascii_escape(text, range, errors), | ||
54 | AsciiCodeEscape => validate_ascii_code_escape(text, range, errors), | ||
55 | UnicodeEscape => validate_unicode_escape(text, range, errors), | ||
56 | CodePoint => { | ||
57 | // These code points must always be escaped | ||
58 | if text == "\t" || text == "\r" || text == "\n" { | ||
59 | errors.push(SyntaxError::new(UnescapedCodepoint, range)); | ||
60 | } | ||
61 | } | ||
62 | StringComponentKind::IgnoreNewline => { /* always valid */ } | ||
63 | } | ||
64 | } | ||
65 | |||
66 | fn validate_ascii_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) { | ||
67 | if text.len() == 1 { | ||
68 | // Escape sequence consists only of leading `\` (only occurs at EOF, otherwise e.g. '\' is treated as an unclosed char containing a single quote `'`) | ||
69 | errors.push(SyntaxError::new(EmptyAsciiEscape, range)); | ||
70 | } else { | ||
71 | let escape_code = text.chars().skip(1).next().unwrap(); | ||
72 | if !is_ascii_escape(escape_code) { | ||
73 | errors.push(SyntaxError::new(InvalidAsciiEscape, range)); | ||
74 | } | ||
75 | } | ||
76 | } | ||
77 | |||
78 | pub(super) fn is_ascii_escape(code: char) -> bool { | ||
79 | match code { | ||
80 | '\\' | '\'' | '"' | 'n' | 'r' | 't' | '0' => true, | ||
81 | _ => false, | ||
82 | } | ||
83 | } | ||
84 | |||
85 | fn validate_ascii_code_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) { | ||
86 | // An AsciiCodeEscape has 4 chars, example: `\xDD` | ||
87 | if !text.is_ascii() { | ||
88 | // FIXME: Give a more precise error message (say what the invalid character was) | ||
89 | errors.push(SyntaxError::new(AsciiCodeEscapeOutOfRange, range)); | ||
90 | } else if text.chars().count() < 4 { | ||
91 | errors.push(SyntaxError::new(TooShortAsciiCodeEscape, range)); | ||
92 | } else { | ||
93 | assert_eq!( | ||
94 | text.chars().count(), | ||
95 | 4, | ||
96 | "AsciiCodeEscape cannot be longer than 4 chars, but text '{}' is", | ||
97 | text, | ||
98 | ); | ||
99 | |||
100 | match u8::from_str_radix(&text[2..], 16) { | ||
101 | Ok(code) if code < 128 => { /* Escape code is valid */ } | ||
102 | Ok(_) => errors.push(SyntaxError::new(AsciiCodeEscapeOutOfRange, range)), | ||
103 | Err(_) => errors.push(SyntaxError::new(MalformedAsciiCodeEscape, range)), | ||
104 | } | ||
105 | } | ||
106 | } | ||
107 | |||
108 | fn validate_unicode_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) { | ||
109 | assert!(&text[..2] == "\\u", "UnicodeEscape always starts with \\u"); | ||
110 | |||
111 | if text.len() == 2 { | ||
112 | // No starting `{` | ||
113 | errors.push(SyntaxError::new(MalformedUnicodeEscape, range)); | ||
114 | return; | ||
115 | } | ||
116 | |||
117 | if text.len() == 3 { | ||
118 | // Only starting `{` | ||
119 | errors.push(SyntaxError::new(UnclosedUnicodeEscape, range)); | ||
120 | return; | ||
121 | } | ||
122 | |||
123 | let mut code = ArrayString::<[_; 6]>::new(); | ||
124 | let mut closed = false; | ||
125 | for c in text[3..].chars() { | ||
126 | assert!(!closed, "no characters after escape is closed"); | ||
127 | |||
128 | if c.is_digit(16) { | ||
129 | if code.len() == 6 { | ||
130 | errors.push(SyntaxError::new(OverlongUnicodeEscape, range)); | ||
131 | return; | ||
132 | } | ||
133 | |||
134 | code.push(c); | ||
135 | } else if c == '_' { | ||
136 | // Reject leading _ | ||
137 | if code.len() == 0 { | ||
138 | errors.push(SyntaxError::new(MalformedUnicodeEscape, range)); | ||
139 | return; | ||
140 | } | ||
141 | } else if c == '}' { | ||
142 | closed = true; | ||
143 | } else { | ||
144 | errors.push(SyntaxError::new(MalformedUnicodeEscape, range)); | ||
145 | return; | ||
146 | } | ||
147 | } | ||
148 | |||
149 | if !closed { | ||
150 | errors.push(SyntaxError::new(UnclosedUnicodeEscape, range)) | ||
151 | } | ||
152 | |||
153 | if code.len() == 0 { | ||
154 | errors.push(SyntaxError::new(EmptyUnicodeEcape, range)); | ||
155 | return; | ||
156 | } | ||
157 | |||
158 | match u32::from_str_radix(&code, 16) { | ||
159 | Ok(code_u32) if code_u32 > 0x10FFFF => { | ||
160 | errors.push(SyntaxError::new(UnicodeEscapeOutOfRange, range)); | ||
161 | } | ||
162 | Ok(_) => { | ||
163 | // Valid escape code | ||
164 | } | ||
165 | Err(_) => { | ||
166 | errors.push(SyntaxError::new(MalformedUnicodeEscape, range)); | ||
167 | } | ||
168 | } | ||
169 | } | ||
170 | |||
171 | #[cfg(test)] | ||
172 | mod test { | ||
173 | use crate::{SourceFile, TreeArc}; | ||
174 | |||
175 | fn build_file(literal: &str) -> TreeArc<SourceFile> { | ||
176 | let src = format!("const C: char = '{}';", literal); | ||
177 | SourceFile::parse(&src) | ||
178 | } | ||
179 | |||
180 | fn assert_valid_char(literal: &str) { | ||
181 | let file = build_file(literal); | ||
182 | assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors()); | ||
183 | } | ||
184 | |||
185 | fn assert_invalid_char(literal: &str) { | ||
186 | let file = build_file(literal); | ||
187 | assert!(file.errors().len() > 0); | ||
188 | } | ||
189 | |||
190 | #[test] | ||
191 | fn test_ansi_codepoints() { | ||
192 | for byte in 0..=255u8 { | ||
193 | match byte { | ||
194 | b'\n' | b'\r' | b'\t' => assert_invalid_char(&(byte as char).to_string()), | ||
195 | b'\'' | b'\\' => { /* Ignore character close and backslash */ } | ||
196 | _ => assert_valid_char(&(byte as char).to_string()), | ||
197 | } | ||
198 | } | ||
199 | } | ||
200 | |||
201 | #[test] | ||
202 | fn test_unicode_codepoints() { | ||
203 | let valid = ["Ƒ", "バ", "メ", "﷽"]; | ||
204 | for c in &valid { | ||
205 | assert_valid_char(c); | ||
206 | } | ||
207 | } | ||
208 | |||
209 | #[test] | ||
210 | fn test_unicode_multiple_codepoints() { | ||
211 | let invalid = ["नी", "👨👨"]; | ||
212 | for c in &invalid { | ||
213 | assert_invalid_char(c); | ||
214 | } | ||
215 | } | ||
216 | |||
217 | #[test] | ||
218 | fn test_valid_ascii_escape() { | ||
219 | let valid = [r"\'", "\"", "\\\\", "\\\"", r"\n", r"\r", r"\t", r"\0"]; | ||
220 | for c in &valid { | ||
221 | assert_valid_char(c); | ||
222 | } | ||
223 | } | ||
224 | |||
225 | #[test] | ||
226 | fn test_invalid_ascii_escape() { | ||
227 | let invalid = [r"\a", r"\?", r"\"]; | ||
228 | for c in &invalid { | ||
229 | assert_invalid_char(c); | ||
230 | } | ||
231 | } | ||
232 | |||
233 | #[test] | ||
234 | fn test_valid_ascii_code_escape() { | ||
235 | let valid = [r"\x00", r"\x7F", r"\x55"]; | ||
236 | for c in &valid { | ||
237 | assert_valid_char(c); | ||
238 | } | ||
239 | } | ||
240 | |||
241 | #[test] | ||
242 | fn test_invalid_ascii_code_escape() { | ||
243 | let invalid = [r"\x", r"\x7", r"\xF0"]; | ||
244 | for c in &invalid { | ||
245 | assert_invalid_char(c); | ||
246 | } | ||
247 | } | ||
248 | |||
249 | #[test] | ||
250 | fn test_valid_unicode_escape() { | ||
251 | let valid = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"]; | ||
252 | for c in &valid { | ||
253 | assert_valid_char(c); | ||
254 | } | ||
255 | } | ||
256 | |||
257 | #[test] | ||
258 | fn test_invalid_unicode_escape() { | ||
259 | let invalid = [ | ||
260 | r"\u", | ||
261 | r"\u{}", | ||
262 | r"\u{", | ||
263 | r"\u{FF", | ||
264 | r"\u{FFFFFF}", | ||
265 | r"\u{_F}", | ||
266 | r"\u{00FFFFF}", | ||
267 | r"\u{110000}", | ||
268 | ]; | ||
269 | for c in &invalid { | ||
270 | assert_invalid_char(c); | ||
271 | } | ||
272 | } | ||
273 | } | ||
diff --git a/crates/ra_syntax/src/validation/string.rs b/crates/ra_syntax/src/validation/string.rs deleted file mode 100644 index fc2f1b992..000000000 --- a/crates/ra_syntax/src/validation/string.rs +++ /dev/null | |||
@@ -1,154 +0,0 @@ | |||
1 | use crate::{ | ||
2 | string_lexing, | ||
3 | SyntaxError, | ||
4 | SyntaxErrorKind::*, | ||
5 | SyntaxToken, | ||
6 | }; | ||
7 | |||
8 | use super::char; | ||
9 | |||
10 | pub(crate) fn validate_string_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) { | ||
11 | let literal_text = node.text(); | ||
12 | let literal_range = node.range(); | ||
13 | let mut components = string_lexing::parse_quoted_literal(None, '"', literal_text); | ||
14 | for component in &mut components { | ||
15 | let range = component.range + literal_range.start(); | ||
16 | |||
17 | // Chars must escape \t, \n and \r codepoints, but strings don't | ||
18 | let text = &literal_text[component.range]; | ||
19 | match text { | ||
20 | "\t" | "\n" | "\r" => { /* always valid */ } | ||
21 | _ => char::validate_char_component(text, component.kind, range, errors), | ||
22 | } | ||
23 | } | ||
24 | |||
25 | if !components.has_closing_quote { | ||
26 | errors.push(SyntaxError::new(UnclosedString, literal_range)); | ||
27 | } | ||
28 | |||
29 | if let Some(range) = components.suffix { | ||
30 | errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start())); | ||
31 | } | ||
32 | } | ||
33 | |||
34 | #[cfg(test)] | ||
35 | mod test { | ||
36 | use crate::{SourceFile, TreeArc}; | ||
37 | |||
38 | fn build_file(literal: &str) -> TreeArc<SourceFile> { | ||
39 | let src = format!(r#"const S: &'static str = "{}";"#, literal); | ||
40 | println!("Source: {}", src); | ||
41 | SourceFile::parse(&src) | ||
42 | } | ||
43 | |||
44 | fn assert_valid_str(literal: &str) { | ||
45 | let file = build_file(literal); | ||
46 | assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors()); | ||
47 | } | ||
48 | |||
49 | fn assert_invalid_str(literal: &str) { | ||
50 | let file = build_file(literal); | ||
51 | assert!(file.errors().len() > 0); | ||
52 | } | ||
53 | |||
54 | #[test] | ||
55 | fn test_ansi_codepoints() { | ||
56 | for byte in 0..=255u8 { | ||
57 | match byte { | ||
58 | b'\"' | b'\\' => { /* Ignore string close and backslash */ } | ||
59 | _ => assert_valid_str(&(byte as char).to_string()), | ||
60 | } | ||
61 | } | ||
62 | } | ||
63 | |||
64 | #[test] | ||
65 | fn test_unicode_codepoints() { | ||
66 | let valid = ["Ƒ", "バ", "メ", "﷽"]; | ||
67 | for c in &valid { | ||
68 | assert_valid_str(c); | ||
69 | } | ||
70 | } | ||
71 | |||
72 | #[test] | ||
73 | fn test_unicode_multiple_codepoints() { | ||
74 | let valid = ["नी", "👨👨"]; | ||
75 | for c in &valid { | ||
76 | assert_valid_str(c); | ||
77 | } | ||
78 | } | ||
79 | |||
80 | #[test] | ||
81 | fn test_valid_ascii_escape() { | ||
82 | let valid = [r"\'", r#"\""#, r"\\", r"\n", r"\r", r"\t", r"\0", "a", "b"]; | ||
83 | for c in &valid { | ||
84 | assert_valid_str(c); | ||
85 | } | ||
86 | } | ||
87 | |||
88 | #[test] | ||
89 | fn test_invalid_ascii_escape() { | ||
90 | let invalid = [r"\a", r"\?", r"\"]; | ||
91 | for c in &invalid { | ||
92 | assert_invalid_str(c); | ||
93 | } | ||
94 | } | ||
95 | |||
96 | #[test] | ||
97 | fn test_valid_ascii_code_escape() { | ||
98 | let valid = [r"\x00", r"\x7F", r"\x55"]; | ||
99 | for c in &valid { | ||
100 | assert_valid_str(c); | ||
101 | } | ||
102 | } | ||
103 | |||
104 | #[test] | ||
105 | fn test_invalid_ascii_code_escape() { | ||
106 | let invalid = [r"\x", r"\x7", r"\xF0"]; | ||
107 | for c in &invalid { | ||
108 | assert_invalid_str(c); | ||
109 | } | ||
110 | } | ||
111 | |||
112 | #[test] | ||
113 | fn test_valid_unicode_escape() { | ||
114 | let valid = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"]; | ||
115 | for c in &valid { | ||
116 | assert_valid_str(c); | ||
117 | } | ||
118 | } | ||
119 | |||
120 | #[test] | ||
121 | fn test_invalid_unicode_escape() { | ||
122 | let invalid = [ | ||
123 | r"\u", | ||
124 | r"\u{}", | ||
125 | r"\u{", | ||
126 | r"\u{FF", | ||
127 | r"\u{FFFFFF}", | ||
128 | r"\u{_F}", | ||
129 | r"\u{00FFFFF}", | ||
130 | r"\u{110000}", | ||
131 | ]; | ||
132 | for c in &invalid { | ||
133 | assert_invalid_str(c); | ||
134 | } | ||
135 | } | ||
136 | |||
137 | #[test] | ||
138 | fn test_mixed() { | ||
139 | assert_valid_str( | ||
140 | r"This is the tale of a string | ||
141 | with a newline in between, some emoji (👨👨) here and there, | ||
142 | unicode escapes like this: \u{1FFBB} and weird stuff like | ||
143 | this ﷽", | ||
144 | ); | ||
145 | } | ||
146 | |||
147 | #[test] | ||
148 | fn test_ignore_newline() { | ||
149 | assert_valid_str( | ||
150 | "Hello \ | ||
151 | World", | ||
152 | ); | ||
153 | } | ||
154 | } | ||
diff --git a/crates/ra_syntax/src/validation/unescape.rs b/crates/ra_syntax/src/validation/unescape.rs new file mode 100644 index 000000000..2086046b6 --- /dev/null +++ b/crates/ra_syntax/src/validation/unescape.rs | |||
@@ -0,0 +1,521 @@ | |||
1 | //! Utilities for validating string and char literals and turning them into | ||
2 | //! values they represent. | ||
3 | //! | ||
4 | //! This file is copy-pasted from the compiler | ||
5 | //! | ||
6 | //! https://github.com/rust-lang/rust/blob/c6ac57564852cb6e2d0db60f7b46d9eb98d4b449/src/libsyntax/parse/unescape.rs | ||
7 | //! | ||
8 | //! Hopefully, we'll share this code in a proper way some day | ||
9 | |||
10 | use std::str::Chars; | ||
11 | use std::ops::Range; | ||
12 | |||
13 | #[derive(Debug, PartialEq, Eq, Clone, Hash)] | ||
14 | pub enum EscapeError { | ||
15 | ZeroChars, | ||
16 | MoreThanOneChar, | ||
17 | |||
18 | LoneSlash, | ||
19 | InvalidEscape, | ||
20 | BareCarriageReturn, | ||
21 | EscapeOnlyChar, | ||
22 | |||
23 | TooShortHexEscape, | ||
24 | InvalidCharInHexEscape, | ||
25 | OutOfRangeHexEscape, | ||
26 | |||
27 | NoBraceInUnicodeEscape, | ||
28 | InvalidCharInUnicodeEscape, | ||
29 | EmptyUnicodeEscape, | ||
30 | UnclosedUnicodeEscape, | ||
31 | LeadingUnderscoreUnicodeEscape, | ||
32 | OverlongUnicodeEscape, | ||
33 | LoneSurrogateUnicodeEscape, | ||
34 | OutOfRangeUnicodeEscape, | ||
35 | |||
36 | UnicodeEscapeInByte, | ||
37 | NonAsciiCharInByte, | ||
38 | } | ||
39 | |||
40 | /// Takes a contents of a char literal (without quotes), and returns an | ||
41 | /// unescaped char or an error | ||
42 | pub(crate) fn unescape_char(literal_text: &str) -> Result<char, (usize, EscapeError)> { | ||
43 | let mut chars = literal_text.chars(); | ||
44 | unescape_char_or_byte(&mut chars, Mode::Char) | ||
45 | .map_err(|err| (literal_text.len() - chars.as_str().len(), err)) | ||
46 | } | ||
47 | |||
48 | /// Takes a contents of a string literal (without quotes) and produces a | ||
49 | /// sequence of escaped characters or errors. | ||
50 | pub(crate) fn unescape_str<F>(literal_text: &str, callback: &mut F) | ||
51 | where | ||
52 | F: FnMut(Range<usize>, Result<char, EscapeError>), | ||
53 | { | ||
54 | unescape_str_or_byte_str(literal_text, Mode::Str, callback) | ||
55 | } | ||
56 | |||
57 | pub(crate) fn unescape_byte(literal_text: &str) -> Result<u8, (usize, EscapeError)> { | ||
58 | let mut chars = literal_text.chars(); | ||
59 | unescape_char_or_byte(&mut chars, Mode::Byte) | ||
60 | .map(byte_from_char) | ||
61 | .map_err(|err| (literal_text.len() - chars.as_str().len(), err)) | ||
62 | } | ||
63 | |||
64 | /// Takes a contents of a string literal (without quotes) and produces a | ||
65 | /// sequence of escaped characters or errors. | ||
66 | pub(crate) fn unescape_byte_str<F>(literal_text: &str, callback: &mut F) | ||
67 | where | ||
68 | F: FnMut(Range<usize>, Result<u8, EscapeError>), | ||
69 | { | ||
70 | unescape_str_or_byte_str(literal_text, Mode::ByteStr, &mut |range, char| { | ||
71 | callback(range, char.map(byte_from_char)) | ||
72 | }) | ||
73 | } | ||
74 | |||
75 | #[derive(Debug, Clone, Copy)] | ||
76 | pub(crate) enum Mode { | ||
77 | Char, | ||
78 | Str, | ||
79 | Byte, | ||
80 | ByteStr, | ||
81 | } | ||
82 | |||
83 | impl Mode { | ||
84 | fn in_single_quotes(self) -> bool { | ||
85 | match self { | ||
86 | Mode::Char | Mode::Byte => true, | ||
87 | Mode::Str | Mode::ByteStr => false, | ||
88 | } | ||
89 | } | ||
90 | |||
91 | pub(crate) fn in_double_quotes(self) -> bool { | ||
92 | !self.in_single_quotes() | ||
93 | } | ||
94 | |||
95 | pub(crate) fn is_bytes(self) -> bool { | ||
96 | match self { | ||
97 | Mode::Byte | Mode::ByteStr => true, | ||
98 | Mode::Char | Mode::Str => false, | ||
99 | } | ||
100 | } | ||
101 | } | ||
102 | |||
/// Decodes one (possibly escaped) character of a literal.
///
/// `first_char` is the character the caller already consumed; if it is a
/// backslash, further characters are consumed from `chars` to complete the
/// escape sequence. `mode` selects which bare characters and escapes are
/// legal (char vs byte literal, single- vs double-quoted).
fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {
    if first_char != '\\' {
        // Bare (unescaped) character: only reject the few characters that
        // must always be written as escapes in this mode.
        return match first_char {
            '\t' | '\n' => Err(EscapeError::EscapeOnlyChar),
            // A lone `\r` is a bare carriage return; when followed by `\n`
            // it is reported as "must be escaped" instead. The lookahead is
            // done on a clone so nothing is consumed here.
            '\r' => Err(if chars.clone().next() == Some('\n') {
                EscapeError::EscapeOnlyChar
            } else {
                EscapeError::BareCarriageReturn
            }),
            '\'' if mode.in_single_quotes() => Err(EscapeError::EscapeOnlyChar),
            '"' if mode.in_double_quotes() => Err(EscapeError::EscapeOnlyChar),
            _ => {
                // Byte literals admit only ASCII characters.
                if mode.is_bytes() && !first_char.is_ascii() {
                    return Err(EscapeError::NonAsciiCharInByte);
                }
                Ok(first_char)
            }
        };
    }

    // `first_char` was a backslash: decode the escape sequence proper.
    let second_char = chars.next().ok_or(EscapeError::LoneSlash)?;

    let res = match second_char {
        // Simple one-character escapes.
        '"' => '"',
        'n' => '\n',
        'r' => '\r',
        't' => '\t',
        '\\' => '\\',
        '\'' => '\'',
        '0' => '\0',

        // `\xNN`: exactly two hex digits.
        'x' => {
            let hi = chars.next().ok_or(EscapeError::TooShortHexEscape)?;
            let hi = hi.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?;

            let lo = chars.next().ok_or(EscapeError::TooShortHexEscape)?;
            let lo = lo.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?;

            let value = hi * 16 + lo;

            // In char/string literals `\x` may only encode ASCII
            // (`\x00`..`\x7F`); byte literals accept `\x00`..`\xFF`.
            if !mode.is_bytes() && !is_ascii(value) {
                return Err(EscapeError::OutOfRangeHexEscape);
            }
            let value = value as u8;

            value as char
        }

        // `\u{...}`: 1 to 6 hex digits; `_` separators are allowed after
        // the first digit.
        'u' => {
            if chars.next() != Some('{') {
                return Err(EscapeError::NoBraceInUnicodeEscape);
            }

            // The first character inside the braces must be a hex digit
            // (leading `_` and an immediately closing `}` are errors).
            let mut n_digits = 1;
            let mut value: u32 = match chars.next().ok_or(EscapeError::UnclosedUnicodeEscape)? {
                '_' => return Err(EscapeError::LeadingUnderscoreUnicodeEscape),
                '}' => return Err(EscapeError::EmptyUnicodeEscape),
                c => c.to_digit(16).ok_or(EscapeError::InvalidCharInUnicodeEscape)?,
            };

            loop {
                match chars.next() {
                    None => return Err(EscapeError::UnclosedUnicodeEscape),
                    Some('_') => continue,
                    Some('}') => {
                        // Error precedence on close: overlong escape is
                        // reported before the byte-literal restriction.
                        if n_digits > 6 {
                            return Err(EscapeError::OverlongUnicodeEscape);
                        }
                        if mode.is_bytes() {
                            return Err(EscapeError::UnicodeEscapeInByte);
                        }

                        break std::char::from_u32(value).ok_or_else(|| {
                            // `from_u32` fails both above the char range and
                            // on surrogates; distinguish the two errors.
                            if value > 0x10FFFF {
                                EscapeError::OutOfRangeUnicodeEscape
                            } else {
                                EscapeError::LoneSurrogateUnicodeEscape
                            }
                        })?;
                    }
                    Some(c) => {
                        let digit =
                            c.to_digit(16).ok_or(EscapeError::InvalidCharInUnicodeEscape)?;
                        n_digits += 1;
                        if n_digits > 6 {
                            // Too many digits already: keep scanning for the
                            // closing brace but stop accumulating so `value`
                            // cannot overflow.
                            continue;
                        }
                        let digit = digit as u32;
                        value = value * 16 + digit;
                    }
                };
            }
        }
        _ => return Err(EscapeError::InvalidEscape),
    };
    Ok(res)
}
200 | |||
201 | fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> { | ||
202 | let first_char = chars.next().ok_or(EscapeError::ZeroChars)?; | ||
203 | let res = scan_escape(first_char, chars, mode)?; | ||
204 | if chars.next().is_some() { | ||
205 | return Err(EscapeError::MoreThanOneChar); | ||
206 | } | ||
207 | Ok(res) | ||
208 | } | ||
209 | |||
/// Takes a contents of a string literal (without quotes) and produces a
/// sequence of escaped characters or errors.
fn unescape_str_or_byte_str<F>(src: &str, mode: Mode, callback: &mut F)
where
    F: FnMut(Range<usize>, Result<char, EscapeError>),
{
    assert!(mode.in_double_quotes());
    // Byte offsets into `src` are recovered from how much of the iterator
    // remains: `initial_len - chars.as_str().len()`.
    let initial_len = src.len();
    let mut chars = src.chars();
    while let Some(first_char) = chars.next() {
        // Start offset of the current (possibly multi-char) sequence.
        let start = initial_len - chars.as_str().len() - first_char.len_utf8();

        let unescaped_char = match first_char {
            '\\' => {
                // Two characters of lookahead (on a clone, so nothing is
                // consumed) to detect line-continuation escapes.
                let (second_char, third_char) = {
                    let mut chars = chars.clone();
                    (chars.next(), chars.next())
                };
                match (second_char, third_char) {
                    // `\` followed by a (CR)LF skips the newline plus any
                    // following ASCII whitespace and yields no character.
                    (Some('\n'), _) | (Some('\r'), Some('\n')) => {
                        skip_ascii_whitespace(&mut chars);
                        continue;
                    }
                    _ => scan_escape(first_char, &mut chars, mode),
                }
            }
            '\r' => {
                // A `\r\n` pair is normalized to `\n`; a lone `\r` falls
                // through to `scan_escape`, which reports it as an error.
                let second_char = chars.clone().next();
                if second_char == Some('\n') {
                    chars.next();
                    Ok('\n')
                } else {
                    scan_escape(first_char, &mut chars, mode)
                }
            }
            // Raw newlines and tabs are valid inside string literals.
            '\n' => Ok('\n'),
            '\t' => Ok('\t'),
            _ => scan_escape(first_char, &mut chars, mode),
        };
        // End offset: everything consumed since `start` belongs to this
        // sequence; report it together with the result.
        let end = initial_len - chars.as_str().len();
        callback(start..end, unescaped_char);
    }

    // Advances `chars` past a run of ASCII whitespace (space, tab, LF, CR);
    // used after a line-continuation backslash.
    fn skip_ascii_whitespace(chars: &mut Chars<'_>) {
        let str = chars.as_str();
        let first_non_space = str
            .bytes()
            .position(|b| b != b' ' && b != b'\t' && b != b'\n' && b != b'\r')
            .unwrap_or(str.len());
        *chars = str[first_non_space..].chars()
    }
}
262 | |||
/// Narrows an unescaped character to a byte; valid because byte-literal
/// modes only ever produce values in `0..=255`.
fn byte_from_char(c: char) -> u8 {
    let code_point = u32::from(c);
    assert!(code_point <= u8::max_value() as u32, "guaranteed because of Mode::Byte");
    code_point as u8
}
268 | |||
/// Reports whether `x` lies in the 7-bit ASCII range.
fn is_ascii(x: u32) -> bool {
    x < 0x80
}
272 | |||
#[cfg(test)]
mod tests {
    //! Exhaustive checks of the public unescaping entry points: every
    //! `EscapeError` variant for char/byte literals, plus round-trips of
    //! the valid escapes. (Two exact-duplicate `\u{ffffff}` checks were
    //! removed; the uppercase/lowercase pairs are kept deliberately.)
    use super::*;

    #[test]
    fn test_unescape_char_bad() {
        // Asserts that unescaping the literal fails with the expected
        // error; the reported offset is ignored here.
        fn check(literal_text: &str, expected_error: EscapeError) {
            let actual_result = unescape_char(literal_text).map_err(|(_offset, err)| err);
            assert_eq!(actual_result, Err(expected_error));
        }

        check("", EscapeError::ZeroChars);
        check(r"\", EscapeError::LoneSlash);

        check("\n", EscapeError::EscapeOnlyChar);
        check("\r\n", EscapeError::EscapeOnlyChar);
        check("\t", EscapeError::EscapeOnlyChar);
        check("'", EscapeError::EscapeOnlyChar);
        check("\r", EscapeError::BareCarriageReturn);

        check("spam", EscapeError::MoreThanOneChar);
        check(r"\x0ff", EscapeError::MoreThanOneChar);
        check(r#"\"a"#, EscapeError::MoreThanOneChar);
        check(r"\na", EscapeError::MoreThanOneChar);
        check(r"\ra", EscapeError::MoreThanOneChar);
        check(r"\ta", EscapeError::MoreThanOneChar);
        check(r"\\a", EscapeError::MoreThanOneChar);
        check(r"\'a", EscapeError::MoreThanOneChar);
        check(r"\0a", EscapeError::MoreThanOneChar);
        check(r"\u{0}x", EscapeError::MoreThanOneChar);
        check(r"\u{1F63b}}", EscapeError::MoreThanOneChar);

        check(r"\v", EscapeError::InvalidEscape);
        check(r"\💩", EscapeError::InvalidEscape);
        check(r"\●", EscapeError::InvalidEscape);

        check(r"\x", EscapeError::TooShortHexEscape);
        check(r"\x0", EscapeError::TooShortHexEscape);
        check(r"\xf", EscapeError::TooShortHexEscape);
        check(r"\xa", EscapeError::TooShortHexEscape);
        check(r"\xx", EscapeError::InvalidCharInHexEscape);
        check(r"\xы", EscapeError::InvalidCharInHexEscape);
        check(r"\x🦀", EscapeError::InvalidCharInHexEscape);
        check(r"\xtt", EscapeError::InvalidCharInHexEscape);
        // `\x80`..`\xFF` are only valid in byte literals.
        check(r"\xff", EscapeError::OutOfRangeHexEscape);
        check(r"\xFF", EscapeError::OutOfRangeHexEscape);
        check(r"\x80", EscapeError::OutOfRangeHexEscape);

        check(r"\u", EscapeError::NoBraceInUnicodeEscape);
        check(r"\u[0123]", EscapeError::NoBraceInUnicodeEscape);
        check(r"\u{0x}", EscapeError::InvalidCharInUnicodeEscape);
        check(r"\u{", EscapeError::UnclosedUnicodeEscape);
        check(r"\u{0000", EscapeError::UnclosedUnicodeEscape);
        check(r"\u{}", EscapeError::EmptyUnicodeEscape);
        check(r"\u{_0000}", EscapeError::LeadingUnderscoreUnicodeEscape);
        check(r"\u{0000000}", EscapeError::OverlongUnicodeEscape);
        check(r"\u{FFFFFF}", EscapeError::OutOfRangeUnicodeEscape);
        check(r"\u{ffffff}", EscapeError::OutOfRangeUnicodeEscape);

        // Low surrogates (D800..DFFF) are not chars...
        check(r"\u{DC00}", EscapeError::LoneSurrogateUnicodeEscape);
        check(r"\u{DDDD}", EscapeError::LoneSurrogateUnicodeEscape);
        check(r"\u{DFFF}", EscapeError::LoneSurrogateUnicodeEscape);

        // ...and neither are high surrogates.
        check(r"\u{D800}", EscapeError::LoneSurrogateUnicodeEscape);
        check(r"\u{DAAA}", EscapeError::LoneSurrogateUnicodeEscape);
        check(r"\u{DBFF}", EscapeError::LoneSurrogateUnicodeEscape);
    }

    #[test]
    fn test_unescape_char_good() {
        // Asserts that unescaping the literal yields the expected char.
        fn check(literal_text: &str, expected_char: char) {
            let actual_result = unescape_char(literal_text);
            assert_eq!(actual_result, Ok(expected_char));
        }

        check("a", 'a');
        check("ы", 'ы');
        check("🦀", '🦀');

        check(r#"\""#, '"');
        check(r"\n", '\n');
        check(r"\r", '\r');
        check(r"\t", '\t');
        check(r"\\", '\\');
        check(r"\'", '\'');
        check(r"\0", '\0');

        check(r"\x00", '\0');
        check(r"\x5a", 'Z');
        check(r"\x5A", 'Z');
        check(r"\x7f", 127 as char);

        // Leading zeros and `_` separators are accepted in `\u{...}`.
        check(r"\u{0}", '\0');
        check(r"\u{000000}", '\0');
        check(r"\u{41}", 'A');
        check(r"\u{0041}", 'A');
        check(r"\u{00_41}", 'A');
        check(r"\u{4__1__}", 'A');
        check(r"\u{1F63b}", '😻');
    }

    #[test]
    fn test_unescape_str_good() {
        // Accumulates the unescaped string, or captures the first error.
        fn check(literal_text: &str, expected: &str) {
            let mut buf = Ok(String::with_capacity(literal_text.len()));
            unescape_str(literal_text, &mut |range, c| {
                if let Ok(b) = &mut buf {
                    match c {
                        Ok(c) => b.push(c),
                        Err(e) => buf = Err((range, e)),
                    }
                }
            });
            let buf = buf.as_ref().map(|it| it.as_ref());
            assert_eq!(buf, Ok(expected))
        }

        check("foo", "foo");
        check("", "");
        // `\r\n` is normalized to `\n`.
        check(" \t\n\r\n", " \t\n\n");

        // Backslash-newline continuations swallow following whitespace.
        check("hello \\\n world", "hello world");
        check("hello \\\r\n world", "hello world");
        check("thread's", "thread's")
    }

    #[test]
    fn test_unescape_byte_bad() {
        // Asserts that unescaping the byte literal fails with the expected
        // error; the reported offset is ignored here.
        fn check(literal_text: &str, expected_error: EscapeError) {
            let actual_result = unescape_byte(literal_text).map_err(|(_offset, err)| err);
            assert_eq!(actual_result, Err(expected_error));
        }

        check("", EscapeError::ZeroChars);
        check(r"\", EscapeError::LoneSlash);

        check("\n", EscapeError::EscapeOnlyChar);
        check("\r\n", EscapeError::EscapeOnlyChar);
        check("\t", EscapeError::EscapeOnlyChar);
        check("'", EscapeError::EscapeOnlyChar);
        check("\r", EscapeError::BareCarriageReturn);

        check("spam", EscapeError::MoreThanOneChar);
        check(r"\x0ff", EscapeError::MoreThanOneChar);
        check(r#"\"a"#, EscapeError::MoreThanOneChar);
        check(r"\na", EscapeError::MoreThanOneChar);
        check(r"\ra", EscapeError::MoreThanOneChar);
        check(r"\ta", EscapeError::MoreThanOneChar);
        check(r"\\a", EscapeError::MoreThanOneChar);
        check(r"\'a", EscapeError::MoreThanOneChar);
        check(r"\0a", EscapeError::MoreThanOneChar);

        check(r"\v", EscapeError::InvalidEscape);
        check(r"\💩", EscapeError::InvalidEscape);
        check(r"\●", EscapeError::InvalidEscape);

        check(r"\x", EscapeError::TooShortHexEscape);
        check(r"\x0", EscapeError::TooShortHexEscape);
        check(r"\xa", EscapeError::TooShortHexEscape);
        check(r"\xf", EscapeError::TooShortHexEscape);
        check(r"\xx", EscapeError::InvalidCharInHexEscape);
        check(r"\xы", EscapeError::InvalidCharInHexEscape);
        check(r"\x🦀", EscapeError::InvalidCharInHexEscape);
        check(r"\xtt", EscapeError::InvalidCharInHexEscape);

        check(r"\u", EscapeError::NoBraceInUnicodeEscape);
        check(r"\u[0123]", EscapeError::NoBraceInUnicodeEscape);
        check(r"\u{0x}", EscapeError::InvalidCharInUnicodeEscape);
        check(r"\u{", EscapeError::UnclosedUnicodeEscape);
        check(r"\u{0000", EscapeError::UnclosedUnicodeEscape);
        check(r"\u{}", EscapeError::EmptyUnicodeEscape);
        check(r"\u{_0000}", EscapeError::LeadingUnderscoreUnicodeEscape);
        check(r"\u{0000000}", EscapeError::OverlongUnicodeEscape);

        // Byte literals must be ASCII...
        check("ы", EscapeError::NonAsciiCharInByte);
        check("🦀", EscapeError::NonAsciiCharInByte);

        // ...and may not use `\u{...}` escapes at all, regardless of value.
        check(r"\u{0}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{000000}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{41}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{0041}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{00_41}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{4__1__}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{1F63b}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{0}x", EscapeError::UnicodeEscapeInByte);
        check(r"\u{1F63b}}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{FFFFFF}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{ffffff}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{DC00}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{DDDD}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{DFFF}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{D800}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{DAAA}", EscapeError::UnicodeEscapeInByte);
        check(r"\u{DBFF}", EscapeError::UnicodeEscapeInByte);
    }

    #[test]
    fn test_unescape_byte_good() {
        // Asserts that unescaping the byte literal yields the expected byte.
        fn check(literal_text: &str, expected_byte: u8) {
            let actual_result = unescape_byte(literal_text);
            assert_eq!(actual_result, Ok(expected_byte));
        }

        check("a", b'a');

        check(r#"\""#, b'"');
        check(r"\n", b'\n');
        check(r"\r", b'\r');
        check(r"\t", b'\t');
        check(r"\\", b'\\');
        check(r"\'", b'\'');
        check(r"\0", b'\0');

        // Unlike char literals, the full `\x00`..`\xFF` range is valid.
        check(r"\x00", b'\0');
        check(r"\x5a", b'Z');
        check(r"\x5A", b'Z');
        check(r"\x7f", 127);
        check(r"\x80", 128);
        check(r"\xff", 255);
        check(r"\xFF", 255);
    }

    #[test]
    fn test_unescape_byte_str_good() {
        // Accumulates the unescaped bytes, or captures the first error.
        fn check(literal_text: &str, expected: &[u8]) {
            let mut buf = Ok(Vec::with_capacity(literal_text.len()));
            unescape_byte_str(literal_text, &mut |range, c| {
                if let Ok(b) = &mut buf {
                    match c {
                        Ok(c) => b.push(c),
                        Err(e) => buf = Err((range, e)),
                    }
                }
            });
            let buf = buf.as_ref().map(|it| it.as_ref());
            assert_eq!(buf, Ok(expected))
        }

        check("foo", b"foo");
        check("", b"");
        // `\r\n` is normalized to `\n`.
        check(" \t\n\r\n", b" \t\n\n");

        // Backslash-newline continuations swallow following whitespace.
        check("hello \\\n world", b"hello world");
        check("hello \\\r\n world", b"hello world");
        check("thread's", b"thread's")
    }
}
diff --git a/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt b/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt index 61a28134a..e0e38d37d 100644 --- a/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt +++ b/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt | |||
@@ -40,7 +40,6 @@ SOURCE_FILE@[0; 112) | |||
40 | WHITESPACE@[43; 44) " " | 40 | WHITESPACE@[43; 44) " " |
41 | LITERAL@[44; 59) | 41 | LITERAL@[44; 59) |
42 | STRING@[44; 59) "\"string\"invalid" | 42 | STRING@[44; 59) "\"string\"invalid" |
43 | err: `Invalid literal suffix` | ||
44 | SEMI@[59; 60) ";" | 43 | SEMI@[59; 60) ";" |
45 | WHITESPACE@[60; 65) "\n " | 44 | WHITESPACE@[60; 65) "\n " |
46 | LET_STMT@[65; 83) | 45 | LET_STMT@[65; 83) |
@@ -53,7 +52,6 @@ SOURCE_FILE@[0; 112) | |||
53 | WHITESPACE@[72; 73) " " | 52 | WHITESPACE@[72; 73) " " |
54 | LITERAL@[73; 82) | 53 | LITERAL@[73; 82) |
55 | BYTE@[73; 82) "b\'b\'_suff" | 54 | BYTE@[73; 82) "b\'b\'_suff" |
56 | err: `Invalid literal suffix` | ||
57 | SEMI@[82; 83) ";" | 55 | SEMI@[82; 83) ";" |
58 | WHITESPACE@[83; 88) "\n " | 56 | WHITESPACE@[83; 88) "\n " |
59 | LET_STMT@[88; 109) | 57 | LET_STMT@[88; 109) |
@@ -66,7 +64,6 @@ SOURCE_FILE@[0; 112) | |||
66 | WHITESPACE@[95; 96) " " | 64 | WHITESPACE@[95; 96) " " |
67 | LITERAL@[96; 108) | 65 | LITERAL@[96; 108) |
68 | BYTE_STRING@[96; 108) "b\"bs\"invalid" | 66 | BYTE_STRING@[96; 108) "b\"bs\"invalid" |
69 | err: `Invalid literal suffix` | ||
70 | SEMI@[108; 109) ";" | 67 | SEMI@[108; 109) ";" |
71 | WHITESPACE@[109; 110) "\n" | 68 | WHITESPACE@[109; 110) "\n" |
72 | R_CURLY@[110; 111) "}" | 69 | R_CURLY@[110; 111) "}" |