Diffstat (limited to 'crates')
-rw-r--r--  crates/hir/src/from_id.rs | 2
-rw-r--r--  crates/hir_def/src/body/lower.rs | 4
-rw-r--r--  crates/hir_def/src/expr.rs | 2
-rw-r--r--  crates/hir_def/src/nameres/collector.rs | 8
-rw-r--r--  crates/hir_def/src/resolver.rs | 2
-rw-r--r--  crates/hir_expand/src/db.rs | 2
-rw-r--r--  crates/hir_ty/src/diagnostics/expr.rs | 4
-rw-r--r--  crates/hir_ty/src/diagnostics/match_check.rs | 4
-rw-r--r--  crates/hir_ty/src/lower.rs | 6
-rw-r--r--  crates/ide/src/doc_links.rs | 4
-rw-r--r--  crates/ide/src/hover.rs | 20
-rw-r--r--  crates/ide/src/references.rs | 2
-rw-r--r--  crates/ide/src/references/rename.rs | 4
-rw-r--r--  crates/ide_db/src/imports_locator.rs | 2
-rw-r--r--  crates/mbe/src/lib.rs | 4
-rw-r--r--  crates/mbe/src/mbe_expander/matcher.rs | 2
-rw-r--r--  crates/mbe/src/mbe_expander/transcriber.rs | 8
-rw-r--r--  crates/mbe/src/syntax_bridge.rs | 12
-rw-r--r--  crates/mbe/src/tests.rs | 2
-rw-r--r--  crates/parser/src/grammar/items/use_item.rs | 2
-rw-r--r--  crates/proc_macro_api/src/msg.rs | 2
-rw-r--r--  crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs | 2
-rw-r--r--  crates/proc_macro_srv/src/rustc_server.rs | 2
-rw-r--r--  crates/rust-analyzer/src/lsp_utils.rs | 10
-rw-r--r--  crates/rust-analyzer/src/markdown.rs | 2
-rw-r--r--  crates/syntax/src/algo.rs | 4
-rw-r--r--  crates/syntax/src/ast/make.rs | 2
-rw-r--r--  crates/syntax/src/parsing/lexer.rs | 8
-rw-r--r--  crates/syntax/src/validation.rs | 2
-rw-r--r--  crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast | 98
-rw-r--r--  crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs | 2
-rw-r--r--  crates/test_utils/src/lib.rs | 2
32 files changed, 114 insertions, 118 deletions
diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs
index 3e47a5e9d..c8c5fecd7 100644
--- a/crates/hir/src/from_id.rs
+++ b/crates/hir/src/from_id.rs
@@ -1,6 +1,6 @@
 //! Utility module for converting between hir_def ids and code_model wrappers.
 //!
-//! It's unclear if we need this long-term, but it's definitelly useful while we
+//! It's unclear if we need this long-term, but it's definitely useful while we
 //! are splitting the hir.
 
 use hir_def::{
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index 6be1eaade..3dc33f248 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -581,7 +581,7 @@ impl ExprCollector<'_> {
 match res.value {
 Some((mark, expansion)) => {
 // FIXME: Statements are too complicated to recover from error for now.
-// It is because we don't have any hygenine for local variable expansion right now.
+// It is because we don't have any hygiene for local variable expansion right now.
 if T::can_cast(syntax::SyntaxKind::MACRO_STMTS) && res.err.is_some() {
 self.expander.exit(self.db, mark);
 collector(self, None);
@@ -959,7 +959,7 @@ impl ExprCollector<'_> {
 
 fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Vec<PatId>, Option<usize>) {
 // Find the location of the `..`, if there is one. Note that we do not
-// consider the possiblity of there being multiple `..` here.
+// consider the possibility of there being multiple `..` here.
 let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_)));
 // We want to skip the `..` pattern here, since we account for it above.
 let args = args
diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs
index 6a481769d..76f5721e5 100644
--- a/crates/hir_def/src/expr.rs
+++ b/crates/hir_def/src/expr.rs
@@ -1,6 +1,6 @@
 //! This module describes hir-level representation of expressions.
 //!
-//! This representaion is:
+//! This representation is:
 //!
 //! 1. Identity-based. Each expression has an `id`, so we can distinguish
 //!    between different `1` in `1 + 1`.
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs
index 77017e4ea..f027fd48d 100644
--- a/crates/hir_def/src/nameres/collector.rs
+++ b/crates/hir_def/src/nameres/collector.rs
@@ -267,7 +267,7 @@ impl DefCollector<'_> {
 
 // Resolve all indeterminate resolved imports again
 // As some of the macros will expand newly import shadowing partial resolved imports
-// FIXME: We maybe could skip this, if we handle the Indetermine imports in `resolve_imports`
+// FIXME: We maybe could skip this, if we handle the indeterminate imports in `resolve_imports`
 // correctly
 let partial_resolved = self.resolved_imports.iter().filter_map(|directive| {
 if let PartialResolvedImport::Indeterminate(_) = directive.status {
@@ -402,7 +402,7 @@ impl DefCollector<'_> {
 
 /// Define a proc macro
 ///
-/// A proc macro is similar to normal macro scope, but it would not visiable in legacy textual scoped.
+/// A proc macro is similar to normal macro scope, but it would not visible in legacy textual scoped.
 /// And unconditionally exported.
 fn define_proc_macro(&mut self, name: Name, macro_: MacroDefId) {
 self.update(
@@ -592,7 +592,7 @@ impl DefCollector<'_> {
 // XXX: urgh, so this works by accident! Here, we look at
 // the enum data, and, in theory, this might require us to
 // look back at the crate_def_map, creating a cycle. For
-// example, `enum E { crate::some_macro!(); }`. Luckely, the
+// example, `enum E { crate::some_macro!(); }`. Luckily, the
 // only kind of macro that is allowed inside enum is a
 // `cfg_macro`, and we don't need to run name resolution for
 // it, but this is sheer luck!
@@ -655,7 +655,7 @@ impl DefCollector<'_> {
 &mut self,
 module_id: LocalModuleId,
 resolutions: &[(Option<Name>, PerNs)],
-// All resolutions are imported with this visibility; the visibilies in
+// All resolutions are imported with this visibility; the visibilities in
 // the `PerNs` values are ignored and overwritten
 vis: Visibility,
 import_type: ImportType,
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs
index 129f1dbac..e4152a0be 100644
--- a/crates/hir_def/src/resolver.rs
+++ b/crates/hir_def/src/resolver.rs
@@ -27,7 +27,7 @@ use crate::{
 
 #[derive(Debug, Clone, Default)]
 pub struct Resolver {
-// FIXME: all usages generally call `.rev`, so maybe reverse once in consturciton?
+// FIXME: all usages generally call `.rev`, so maybe reverse once in construction?
 scopes: Vec<Scope>,
 }
 
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index ab2637b8c..c62086390 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -40,7 +40,7 @@ impl TokenExpander {
 // FIXME switch these to ExpandResult as well
 TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
 TokenExpander::ProcMacro(_) => {
-// We store the result in salsa db to prevent non-determinisc behavior in
+// We store the result in salsa db to prevent non-deterministic behavior in
 // some proc-macro implementation
 // See #4315 for details
 db.expand_proc_macro(id.into()).into()
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs
index a1c484fdf..107417c27 100644
--- a/crates/hir_ty/src/diagnostics/expr.rs
+++ b/crates/hir_ty/src/diagnostics/expr.rs
@@ -379,7 +379,7 @@ pub fn record_literal_missing_fields(
 id: ExprId,
 expr: &Expr,
 ) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
-let (fields, exhausitve) = match expr {
+let (fields, exhaustive) = match expr {
 Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
 _ => return None,
 };
@@ -400,7 +400,7 @@ pub fn record_literal_missing_fields(
 if missed_fields.is_empty() {
 return None;
 }
-Some((variant_def, missed_fields, exhausitve))
+Some((variant_def, missed_fields, exhaustive))
 }
 
 pub fn record_pattern_missing_fields(
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index 62c329731..61c47eec8 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -14,7 +14,7 @@
 //! The algorithm implemented here is a modified version of the one described in
 //! <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
 //! However, to save future implementors from reading the original paper, we
-//! summarise the algorithm here to hopefully save time and be a little clearer
+//! summarize the algorithm here to hopefully save time and be a little clearer
 //! (without being so rigorous).
 //!
 //! The core of the algorithm revolves about a "usefulness" check. In particular, we
@@ -132,7 +132,7 @@
 //! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns).
 //! That means we're going to check the components from left-to-right, so the algorithm
 //! operates principally on the first component of the matrix and new pattern-stack `p`.
-//! This algorithm is realised in the `is_useful` function.
+//! This algorithm is realized in the `is_useful` function.
 //!
 //! Base case (`n = 0`, i.e., an empty tuple pattern):
 //! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs
index 222f61a11..9594cce8b 100644
--- a/crates/hir_ty/src/lower.rs
+++ b/crates/hir_ty/src/lower.rs
@@ -491,16 +491,16 @@ impl Ty {
 fn from_hir_path_inner(
 ctx: &TyLoweringContext<'_>,
 segment: PathSegment<'_>,
-typable: TyDefId,
+typeable: TyDefId,
 infer_args: bool,
 ) -> Ty {
-let generic_def = match typable {
+let generic_def = match typeable {
 TyDefId::BuiltinType(_) => None,
 TyDefId::AdtId(it) => Some(it.into()),
 TyDefId::TypeAliasId(it) => Some(it.into()),
 };
 let substs = substs_from_path_segment(ctx, segment, generic_def, infer_args);
-ctx.db.ty(typable).subst(&substs)
+ctx.db.ty(typeable).subst(&substs)
 }
 
 /// Collect generic arguments from a path into a `Substs`. See also
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 91f4241f9..de10406bc 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -39,7 +39,7 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: &Defi
 if target.contains("://") {
 (target.to_string(), title.to_string())
 } else {
-// Two posibilities:
+// Two possibilities:
 // * path-based links: `../../module/struct.MyStruct.html`
 // * module-based links (AKA intra-doc links): `super::super::module::MyStruct`
 if let Some(rewritten) = rewrite_intra_doc_link(db, *definition, target, title) {
@@ -442,7 +442,7 @@ fn get_symbol_fragment(db: &dyn HirDatabase, field_or_assoc: &FieldOrAssocItem)
 function.as_assoc_item(db).map(|assoc| assoc.container(db)),
 Some(AssocItemContainer::Trait(..))
 );
-// This distinction may get more complicated when specialisation is available.
+// This distinction may get more complicated when specialization is available.
 // Rustdoc makes this decision based on whether a method 'has defaultness'.
 // Currently this is only the case for provided trait methods.
 if is_trait_method && !function.has_body(db) {
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index e331f8886..e892d5588 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -1953,16 +1953,16 @@ struct S {
 /// Test cases:
 /// case 1. bare URL: https://www.example.com/
 /// case 2. inline URL with title: [example](https://www.example.com/)
-/// case 3. code refrence: [`Result`]
-/// case 4. code refrence but miss footnote: [`String`]
+/// case 3. code reference: [`Result`]
+/// case 4. code reference but miss footnote: [`String`]
 /// case 5. autolink: <http://www.example.com/>
 /// case 6. email address: <[email protected]>
-/// case 7. refrence: [example][example]
+/// case 7. reference: [example][example]
 /// case 8. collapsed link: [example][]
 /// case 9. shortcut link: [example]
 /// case 10. inline without URL: [example]()
-/// case 11. refrence: [foo][foo]
-/// case 12. refrence: [foo][bar]
+/// case 11. reference: [foo][foo]
+/// case 12. reference: [foo][bar]
 /// case 13. collapsed link: [foo][]
 /// case 14. shortcut link: [foo]
 /// case 15. inline without URL: [foo]()
@@ -1989,16 +1989,16 @@ pub fn fo$0o() {}
 Test cases:
 case 1. bare URL: https://www.example.com/
 case 2. inline URL with title: [example](https://www.example.com/)
-case 3. code refrence: `Result`
-case 4. code refrence but miss footnote: `String`
+case 3. code reference: `Result`
+case 4. code reference but miss footnote: `String`
 case 5. autolink: http://www.example.com/
 case 6. email address: [email protected]
-case 7. refrence: example
+case 7. reference: example
 case 8. collapsed link: example
 case 9. shortcut link: example
 case 10. inline without URL: example
-case 11. refrence: foo
-case 12. refrence: foo
+case 11. reference: foo
+case 12. reference: foo
 case 13. collapsed link: foo
 case 14. shortcut link: foo
 case 15. inline without URL: foo
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index d44d96dd4..b774a2be1 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -3,7 +3,7 @@
 //! or `ast::NameRef`. If it's a `ast::NameRef`, at the classification step we
 //! try to resolve the direct tree parent of this element, otherwise we
 //! already have a definition and just need to get its HIR together with
-//! some information that is needed for futher steps of searching.
+//! some information that is needed for further steps of searching.
 //! After that, we collect files that might contain references and look
 //! for text occurrences of the identifier. If there's an `ast::NameRef`
 //! at the index that the match starts at and its tree parent is
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs
index 53d79333c..099900673 100644
--- a/crates/ide/src/references/rename.rs
+++ b/crates/ide/src/references/rename.rs
@@ -945,7 +945,7 @@ use crate::foo$0::FooContent;
 //- /lib.rs
 mod fo$0o;
 //- /foo/mod.rs
-// emtpy
+// empty
 "#,
 expect![[r#"
 RangeInfo {
@@ -995,7 +995,7 @@ mod fo$0o;
 mod outer { mod fo$0o; }
 
 //- /outer/foo.rs
-// emtpy
+// empty
 "#,
 expect![[r#"
 RangeInfo {
diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs
index 0782ab070..e9f23adf8 100644
--- a/crates/ide_db/src/imports_locator.rs
+++ b/crates/ide_db/src/imports_locator.rs
@@ -1,4 +1,4 @@
-//! This module contains an import search funcionality that is provided to the assists module.
+//! This module contains an import search functionality that is provided to the assists module.
 //! Later, this should be moved away to a separate crate that is accessible from the assists module.
 
 use hir::{import_map, AsAssocItem, Crate, MacroDef, ModuleDef, Semantics};
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index b3472879d..19543d777 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -24,7 +24,7 @@ use crate::{
 #[derive(Debug, PartialEq, Eq)]
 pub enum ParseError {
 Expected(String),
-RepetitionEmtpyTokenTree,
+RepetitionEmptyTokenTree,
 }
 
 #[derive(Debug, PartialEq, Eq, Clone)]
@@ -270,7 +270,7 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
 }
 false
 }) {
-return Err(ParseError::RepetitionEmtpyTokenTree);
+return Err(ParseError::RepetitionEmptyTokenTree);
 }
 }
 validate(subtree)?
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs
index c6d615c81..d32e60521 100644
--- a/crates/mbe/src/mbe_expander/matcher.rs
+++ b/crates/mbe/src/mbe_expander/matcher.rs
@@ -378,7 +378,7 @@ pub(super) fn match_repeat(
 src: &mut TtIter,
 ) -> Result<(), ExpandError> {
 // Dirty hack to make macro-expansion terminate.
-// This should be replaced by a propper macro-by-example implementation
+// This should be replaced by a proper macro-by-example implementation
 let mut limit = 65536;
 let mut counter = 0;
 
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs
index 27b2ac777..59a3c80a8 100644
--- a/crates/mbe/src/mbe_expander/transcriber.rs
+++ b/crates/mbe/src/mbe_expander/transcriber.rs
@@ -67,7 +67,7 @@ struct NestingState {
 /// because there is no variable in use by the current repetition
 hit: bool,
 /// `at_end` is currently necessary to tell `expand_repeat` if it should stop
-/// because there is no more value avaible for the current repetition
+/// because there is no more value available for the current repetition
 at_end: bool,
 }
 
@@ -179,11 +179,7 @@ fn expand_repeat(
 
 counter += 1;
 if counter == limit {
-log::warn!(
-"expand_tt excced in repeat pattern exceed limit => {:#?}\n{:#?}",
-template,
-ctx
-);
+log::warn!("expand_tt in repeat pattern exceed limit => {:#?}\n{:#?}", template, ctx);
 break;
 }
 
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 671036e1c..e648519f9 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -149,7 +149,7 @@ impl TokenMap {
 }
 
 fn remove_delim(&mut self, idx: usize) {
-// FIXME: This could be accidently quadratic
+// FIXME: This could be accidentally quadratic
 self.entries.remove(idx);
 }
 }
@@ -476,14 +476,14 @@ impl Convertor {
 
 #[derive(Debug)]
 enum SynToken {
-Ordiniary(SyntaxToken),
+Ordinary(SyntaxToken),
 Punch(SyntaxToken, TextSize),
 }
 
 impl SynToken {
 fn token(&self) -> &SyntaxToken {
 match self {
-SynToken::Ordiniary(it) => it,
+SynToken::Ordinary(it) => it,
 SynToken::Punch(it, _) => it,
 }
 }
@@ -495,7 +495,7 @@ impl SrcToken for SynToken {
 }
 fn to_char(&self) -> Option<char> {
 match self {
-SynToken::Ordiniary(_) => None,
+SynToken::Ordinary(_) => None,
 SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
 }
 }
@@ -535,7 +535,7 @@ impl TokenConvertor for Convertor {
 } else {
 self.punct_offset = None;
 let range = curr.text_range();
-(SynToken::Ordiniary(curr), range)
+(SynToken::Ordinary(curr), range)
 };
 
 Some(token)
@@ -557,7 +557,7 @@ impl TokenConvertor for Convertor {
 let token = if curr.kind().is_punct() {
 SynToken::Punch(curr, 0.into())
 } else {
-SynToken::Ordiniary(curr)
+SynToken::Ordinary(curr)
 };
 Some(token)
 }
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index d854985c5..ecea15c11 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -1967,7 +1967,7 @@ fn test_no_space_after_semi_colon() {
 #[test]
 fn test_rustc_issue_57597() {
 fn test_error(fixture: &str) {
-assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmtpyTokenTree);
+assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmptyTokenTree);
 }
 
 test_error("macro_rules! foo { ($($($i:ident)?)+) => {}; }");
diff --git a/crates/parser/src/grammar/items/use_item.rs b/crates/parser/src/grammar/items/use_item.rs
index 20e6a13cf..5cb8b08e7 100644
--- a/crates/parser/src/grammar/items/use_item.rs
+++ b/crates/parser/src/grammar/items/use_item.rs
@@ -46,7 +46,7 @@ fn use_tree(p: &mut Parser, top_level: bool) {
 // test use_tree_list
 // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`)
 // use {path::from::root}; // Rust 2015
-// use ::{some::arbritrary::path}; // Rust 2015
+// use ::{some::arbitrary::path}; // Rust 2015
 // use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting
 T!['{'] => {
 use_tree_list(p);
diff --git a/crates/proc_macro_api/src/msg.rs b/crates/proc_macro_api/src/msg.rs
index 4cd572101..970f165ed 100644
--- a/crates/proc_macro_api/src/msg.rs
+++ b/crates/proc_macro_api/src/msg.rs
@@ -79,7 +79,7 @@ impl Message for Response {}
 fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> {
 let mut buf = String::new();
 inp.read_line(&mut buf)?;
-buf.pop(); // Remove traling '\n'
+buf.pop(); // Remove trailing '\n'
 Ok(match buf.len() {
 0 => None,
 _ => Some(buf),
diff --git a/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs b/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs
index 3528d5c99..bd1e7c2fc 100644
--- a/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs
+++ b/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs
@@ -251,7 +251,7 @@ impl<S> DecodeMut<'_, '_, S> for String {
 }
 }
 
-/// Simplied version of panic payloads, ignoring
+/// Simplified version of panic payloads, ignoring
 /// types other than `&'static str` and `String`.
 #[derive(Debug)]
 pub enum PanicMessage {
diff --git a/crates/proc_macro_srv/src/rustc_server.rs b/crates/proc_macro_srv/src/rustc_server.rs
index b54aa1f3b..e6006a3c8 100644
--- a/crates/proc_macro_srv/src/rustc_server.rs
+++ b/crates/proc_macro_srv/src/rustc_server.rs
@@ -4,7 +4,7 @@
 //! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
 //! we could provide any TokenStream implementation.
 //! The original idea from fedochet is using proc-macro2 as backend,
-//! we use tt instead for better intergation with RA.
+//! we use tt instead for better integration with RA.
 //!
 //! FIXME: No span and source file information is implemented yet
 
diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs
index 40de56dad..2d06fe538 100644
--- a/crates/rust-analyzer/src/lsp_utils.rs
+++ b/crates/rust-analyzer/src/lsp_utils.rs
@@ -130,7 +130,7 @@ pub(crate) fn apply_document_changes(
 }
 
 /// Checks that the edits inside the completion and the additional edits do not overlap.
-/// LSP explicitly forbits the additional edits to overlap both with the main edit and themselves.
+/// LSP explicitly forbids the additional edits to overlap both with the main edit and themselves.
 pub(crate) fn all_edits_are_disjoint(
 completion: &lsp_types::CompletionItem,
 additional_edits: &[lsp_types::TextEdit],
@@ -290,7 +290,7 @@ mod tests {
 Some(vec![disjoint_edit.clone(), joint_edit.clone()]);
 assert!(
 !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-"Completion with disjoint edits fails the validaton even with empty extra edits"
+"Completion with disjoint edits fails the validation even with empty extra edits"
 );
 
 completion_with_joint_edits.text_edit =
@@ -298,7 +298,7 @@ mod tests {
 completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit.clone()]);
 assert!(
 !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-"Completion with disjoint edits fails the validaton even with empty extra edits"
+"Completion with disjoint edits fails the validation even with empty extra edits"
 );
 
 completion_with_joint_edits.text_edit =
@@ -310,7 +310,7 @@ mod tests {
 completion_with_joint_edits.additional_text_edits = None;
 assert!(
 !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-"Completion with disjoint edits fails the validaton even with empty extra edits"
+"Completion with disjoint edits fails the validation even with empty extra edits"
 );
 
 completion_with_joint_edits.text_edit =
@@ -322,7 +322,7 @@ mod tests {
 completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit]);
 assert!(
 !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-"Completion with disjoint edits fails the validaton even with empty extra edits"
+"Completion with disjoint edits fails the validation even with empty extra edits"
 );
 }
 
diff --git a/crates/rust-analyzer/src/markdown.rs b/crates/rust-analyzer/src/markdown.rs
index a49a58c00..865eaae9b 100644
--- a/crates/rust-analyzer/src/markdown.rs
+++ b/crates/rust-analyzer/src/markdown.rs
@@ -106,7 +106,7 @@ mod tests {
 
 #[test]
 fn test_format_docs_preserves_newlines() {
-let comment = "this\nis\nultiline";
+let comment = "this\nis\nmultiline";
 assert_eq!(format_docs(comment), comment);
 }
 
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 22ab36cd2..384d031e7 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -88,8 +88,8 @@ pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNod
 let keep = u_depth.min(v_depth);
 
 let u_candidates = u.ancestors().skip(u_depth - keep);
-let v_canidates = v.ancestors().skip(v_depth - keep);
-let (res, _) = u_candidates.zip(v_canidates).find(|(x, y)| x == y)?;
+let v_candidates = v.ancestors().skip(v_depth - keep);
+let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
 Some(res)
 }
 
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cafa4c198..1ed8a96e5 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -241,7 +241,7 @@ pub fn wildcard_pat() -> ast::WildcardPat {
 }
 }
 
-/// Creates a tuple of patterns from an interator of patterns.
+/// Creates a tuple of patterns from an iterator of patterns.
 ///
 /// Invariant: `pats` must be length > 1
 ///
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 0cbba73c5..7c8d0a4c4 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -24,7 +24,7 @@ pub struct Token {
 /// Beware that it checks for shebang first and its length contributes to resulting
 /// tokens offsets.
 pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
-// non-empty string is a precondtion of `rustc_lexer::strip_shebang()`.
+// non-empty string is a precondition of `rustc_lexer::strip_shebang()`.
 if text.is_empty() {
 return Default::default();
 }
@@ -76,7 +76,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
 }
 
 /// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and
-/// returns `None` if any tokenization error occured.
+/// returns `None` if any tokenization error occurred.
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
@@ -96,7 +96,7 @@ pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
-// non-empty string is a precondtion of `rustc_lexer::first_token()`.
+// non-empty string is a precondition of `rustc_lexer::first_token()`.
 if text.is_empty() {
 return None;
 }
@@ -117,7 +117,7 @@ fn rustc_token_kind_to_syntax_kind(
 token_text: &str,
 ) -> (SyntaxKind, Option<&'static str>) {
 // A note on an intended tradeoff:
-// We drop some useful infromation here (see patterns with double dots `..`)
+// We drop some useful information here (see patterns with double dots `..`)
 // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
 // being `u16` that come from `rowan::SyntaxKind`.
 
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7f9088382..bfa2dc4ba 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -173,7 +173,7 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) {
 assert_eq!(
 node.parent(),
 pair.parent(),
-"\nunpaired curleys:\n{}\n{:#?}\n",
+"\nunpaired curlys:\n{}\n{:#?}\n",
 root.text(),
 root,
 );
diff --git a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast
index b1fb75ed1..f40500e38 100644
--- a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast
+++ b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast
@@ -1,4 +1,4 @@
-[email protected]9
+[email protected]8
 [email protected]
 [email protected] "use"
 [email protected] " "
@@ -75,62 +75,62 @@ [email protected]
 [email protected] "}"
 [email protected] ";"
 [email protected] " "
-[email protected]6
+[email protected]5
 [email protected] "// Rust 2015"
 [email protected] "\n"
 [email protected] "use"
 [email protected] " "
-[email protected]5
+[email protected]4
 [email protected] "::"
-[email protected]5
+[email protected]4
 [email protected] "{"
-[email protected]4
-[email protected]4
-[email protected]8
+[email protected]3
+[email protected]3
+[email protected]7
 [email protected]
 [email protected]
 [email protected]
 [email protected] "some"
 [email protected] "::"
-[email protected]8
-[email protected]8
-[email protected]8 "arbritrary"
-COLON2@158..160 "::"
-PATH_SEGMENT@160..164
-NAME_REF@160..164
-IDENT@160..164 "path"
-R_CURLY@164..165 "}"
-SEMICOLON@165..166 ";"
-WHITESPACE@166..167 " "
-USE@167..205
-COMMENT@167..179 "// Rust 2015"
-WHITESPACE@179..180 "\n"
-USE_KW@180..183 "use"
-WHITESPACE@183..184 " "
-USE_TREE@184..204
-COLON2@184..186 "::"
-USE_TREE_LIST@186..204
-L_CURLY@186..187 "{"
-USE_TREE@187..203
-USE_TREE_LIST@187..203
-L_CURLY@187..188 "{"
-USE_TREE@188..202
-USE_TREE_LIST@188..202
-L_CURLY@188..189 "{"
-USE_TREE@189..201
-PATH@189..201
-PATH@189..193
-PATH_SEGMENT@189..193
-NAME_REF@189..193
-IDENT@189..193 "root"
-COLON2@193..195 "::"
-PATH_SEGMENT@195..201
-NAME_REF@195..201
-IDENT@195..201 "export"
-R_CURLY@201..202 "}"
-R_CURLY@202..203 "}"
-R_CURLY@203..204 "}"
-SEMICOLON@204..205 ";"
-WHITESPACE@205..206 " "
-COMMENT@206..248 "// Nonsensical but pe ..."
-WHITESPACE@248..249 "\n"
+[email protected]7
+[email protected]7
+[email protected]7 "arbitrary"
+COLON2@157..159 "::"
+PATH_SEGMENT@159..163
+NAME_REF@159..163
+IDENT@159..163 "path"
+R_CURLY@163..164 "}"
+SEMICOLON@164..165 ";"
+WHITESPACE@165..166 " "
+USE@166..204
+COMMENT@166..178 "// Rust 2015"
+WHITESPACE@178..179 "\n"
+USE_KW@179..182 "use"
+WHITESPACE@182..183 " "
+USE_TREE@183..203
+COLON2@183..185 "::"
+USE_TREE_LIST@185..203
+L_CURLY@185..186 "{"
+USE_TREE@186..202
+USE_TREE_LIST@186..202
+L_CURLY@186..187 "{"
+USE_TREE@187..201
+USE_TREE_LIST@187..201
+L_CURLY@187..188 "{"
+USE_TREE@188..200
+PATH@188..200
+PATH@188..192
+PATH_SEGMENT@188..192
+NAME_REF@188..192
+IDENT@188..192 "root"
+COLON2@192..194 "::"
+PATH_SEGMENT@194..200
+NAME_REF@194..200
+IDENT@194..200 "export"
+R_CURLY@200..201 "}"
+R_CURLY@201..202 "}"
+R_CURLY@202..203 "}"
+SEMICOLON@203..204 ";"
+WHITESPACE@204..205 " "
+COMMENT@205..247 "// Nonsensical but pe ..."
+WHITESPACE@247..248 "\n"
diff --git a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs
index 381cba1e2..02af4b446 100644
--- a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs
+++ b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs
@@ -1,4 +1,4 @@
 use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`)
 use {path::from::root}; // Rust 2015
-use ::{some::arbritrary::path}; // Rust 2015
+use ::{some::arbitrary::path}; // Rust 2015
 use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index 84c1d7ebb..e19d2ad61 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -63,7 +63,7 @@ pub fn extract_offset(text: &str) -> (TextSize, String) {
 }
 }
 
-/// Returns the offset of the first occurence of `$0` marker and the copy of `text`
+/// Returns the offset of the first occurrence of `$0` marker and the copy of `text`
 /// without the marker.
 fn try_extract_offset(text: &str) -> Option<(TextSize, String)> {
 let cursor_pos = text.find(CURSOR_MARKER)?;