-rw-r--r--  Cargo.lock                     |  1
-rw-r--r--  crates/ide/Cargo.toml          |  2
-rw-r--r--  crates/ide/src/diagnostics.rs  |  2
-rw-r--r--  crates/syntax/Cargo.toml       |  1
-rw-r--r--  crates/syntax/src/algo.rs      | 87
5 files changed, 68 insertions, 25 deletions
diff --git a/Cargo.lock b/Cargo.lock
index e819297bd..59f743efa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1607,6 +1607,7 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "expect-test",
+ "indexmap",
  "itertools",
  "once_cell",
  "parser",
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml
index 63299dc31..76b52fa04 100644
--- a/crates/ide/Cargo.toml
+++ b/crates/ide/Cargo.toml
@@ -11,7 +11,7 @@ doctest = false
 
 [dependencies]
 either = "1.5.3"
-indexmap = "1.3.2"
+indexmap = "1.4.0"
 itertools = "0.9.0"
 log = "0.4.8"
 rustc-hash = "1.1.0"
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index 90574cb35..232074c3d 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -613,7 +613,7 @@ fn main() {
 pub struct Foo { pub a: i32, pub b: i32 }
 "#,
 r#"
-fn {a:42, b: ()} {}
+fn some(, b: ()} {}
 fn items() {}
 fn here() {}
 
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index c343f2f70..8c0b0abbb 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -17,6 +17,7 @@ rustc_lexer = { version = "683.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
 once_cell = "1.3.1"
+indexmap = "1.4.0"
 # This crate transitively depends on `smol_str` via `rowan`.
 # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
 # to reduce number of compilations
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index ea199f9b8..f53875f28 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -2,9 +2,11 @@
 
 use std::{
     fmt,
+    hash::BuildHasherDefault,
     ops::{self, RangeInclusive},
 };
 
+use indexmap::IndexMap;
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use text_edit::TextEditBuilder;
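
The two imports added above exist to back a single alias introduced in the next hunk: an IndexMap keyed through BuildHasherDefault with the same FxHasher that already powers FxHashMap. A minimal standalone sketch of that pattern, not part of the patch, assuming only the indexmap and rustc-hash dependencies declared in the manifest changes above:

use std::hash::BuildHasherDefault;

use indexmap::IndexMap;
use rustc_hash::FxHasher;

// Same shape as the alias the patch adds to algo.rs: a fast-hashing map
// that additionally remembers the order in which keys were inserted.
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;

fn main() {
    let mut insertions: FxIndexMap<&str, Vec<&str>> = FxIndexMap::default();
    insertions.entry("second anchor").or_insert_with(Vec::new).push("item a");
    insertions.entry("first anchor").or_insert_with(Vec::new).push("item b");
    insertions.entry("second anchor").or_insert_with(Vec::new).push("item c");
    // Unlike FxHashMap, iteration follows key insertion order, which is what
    // lets the insertions recorded by TreeDiff come out deterministically.
    for (anchor, items) in &insertions {
        println!("after {:?} insert {:?}", anchor, items);
    }
}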
@@ -106,42 +108,56 @@ pub enum InsertPosition<T> {
     After(T),
 }
 
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+
 pub struct TreeDiff {
     replacements: FxHashMap<SyntaxElement, SyntaxElement>,
+    deletions: Vec<SyntaxElement>,
+    // the vec as well as the indexmap are both here to preserve order
+    insertions: FxIndexMap<SyntaxElement, Vec<SyntaxElement>>,
 }
 
 impl TreeDiff {
     pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
+        for (anchor, to) in self.insertions.iter() {
+            to.iter().for_each(|to| builder.insert(anchor.text_range().end(), to.to_string()));
+        }
         for (from, to) in self.replacements.iter() {
             builder.replace(from.text_range(), to.to_string())
         }
+        for text_range in self.deletions.iter().map(SyntaxElement::text_range) {
+            builder.delete(text_range);
+        }
     }
 
     pub fn is_empty(&self) -> bool {
-        self.replacements.is_empty()
+        self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty()
     }
 }
 
 /// Finds minimal the diff, which, applied to `from`, will result in `to`.
 ///
-/// Specifically, returns a map whose keys are descendants of `from` and values
-/// are descendants of `to`, such that `replace_descendants(from, map) == to`.
+/// Specifically, returns a structure consisting of replacements, insertions and
+/// deletions such that applying it to `from` will result in `to`.
 ///
-/// A trivial solution is a singleton map `{ from: to }`, but this function
-/// tries to find a more fine-grained diff.
+/// This function tries to find a fine-grained diff.
 pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
-    let mut buf = FxHashMap::default();
+    let mut diff = TreeDiff {
+        replacements: FxHashMap::default(),
+        insertions: FxIndexMap::default(),
+        deletions: Vec::new(),
+    };
+    let (from, to) = (from.clone().into(), to.clone().into());
+
     // FIXME: this is both horrible inefficient and gives larger than
     // necessary diff. I bet there's a cool algorithm to diff trees properly.
-    go(&mut buf, from.clone().into(), to.clone().into());
-    return TreeDiff { replacements: buf };
+    if !syntax_element_eq(&from, &to) {
+        go(&mut diff, from, to);
+    }
+    return diff;
 
-    fn go(
-        buf: &mut FxHashMap<SyntaxElement, SyntaxElement>,
-        lhs: SyntaxElement,
-        rhs: SyntaxElement,
-    ) {
-        if lhs.kind() == rhs.kind()
+    fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool {
+        lhs.kind() == rhs.kind()
             && lhs.text_range().len() == rhs.text_range().len()
             && match (&lhs, &rhs) {
                 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
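
The hunk above gives TreeDiff its two new collections and teaches into_text_edit to emit insertions (placed at the end of their anchor element) and deletions alongside the existing replacements. A hedged usage sketch, not taken from the patch: it assumes SourceFile::parse from this crate and a TextEdit::apply(&mut String) on the text_edit side behave as they do in rust-analyzer around this change.

use syntax::{algo, AstNode, SourceFile};
use text_edit::TextEditBuilder;

fn main() {
    let before = SourceFile::parse("fn foo() {}").tree();
    let after = SourceFile::parse("fn foo() {}\nfn bar() {}").tree();

    // Tree-level diff: replacements plus the new insertions and deletions.
    let tree_diff = algo::diff(before.syntax(), after.syntax());
    assert!(!tree_diff.is_empty());

    // Lower the tree operations to plain text edits.
    let mut builder = TextEditBuilder::default();
    tree_diff.into_text_edit(&mut builder);
    let edit = builder.finish();

    // Applying the edit to the old text should reproduce the new text.
    let mut text = String::from("fn foo() {}");
    edit.apply(&mut text);
    assert_eq!(text, "fn foo() {}\nfn bar() {}");
}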
@@ -150,18 +166,43 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
                 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
                 _ => false,
             }
-        {
-            return;
-        }
-        if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) {
-            if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() {
-                for (lhs, rhs) in lhs.children_with_tokens().zip(rhs.children_with_tokens()) {
-                    go(buf, lhs, rhs)
-                }
+    }
+
+    fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
+        let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) {
+            Some((lhs, rhs)) => (lhs, rhs),
+            _ => {
+                diff.replacements.insert(lhs, rhs);
                 return;
             }
+        };
+
+        let mut rhs_children = rhs.children_with_tokens();
+        let mut lhs_children = lhs.children_with_tokens();
+        let mut last_lhs = None;
+        loop {
+            let lhs_child = lhs_children.next();
+            match (lhs_child.clone(), rhs_children.next()) {
+                (None, None) => break,
+                (None, Some(element)) => match last_lhs.clone() {
+                    Some(prev) => {
+                        diff.insertions.entry(prev).or_insert_with(Vec::new).push(element);
+                    }
+                    // first iteration, this means we have no anchor element to insert after,
+                    // therefore replace the parent node instead
+                    None => {
+                        diff.replacements.insert(lhs.clone().into(), rhs.clone().into());
+                        break;
+                    }
+                },
+                (Some(element), None) => {
+                    diff.deletions.push(element);
+                }
+                (Some(ref lhs_ele), Some(ref rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {}
+                (Some(lhs_ele), Some(rhs_ele)) => go(diff, lhs_ele, rhs_ele),
+            }
+            last_lhs = lhs_child.or(last_lhs);
         }
-        buf.insert(lhs, rhs);
     }
 }
 
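
For orientation, the rewritten go walks the children of the two nodes in lock-step: equal elements are skipped, differing pairs are recursed into, surplus right-hand children become insertions anchored after the last left-hand child seen (last_lhs), and surplus left-hand children become deletions. Below is a self-contained toy sketch of that bookkeeping, reduced to flat string sequences; every name in it is illustrative rather than rust-analyzer's own.

#[derive(Default, Debug)]
struct SeqDiff {
    replacements: Vec<(String, String)>,
    deletions: Vec<String>,
    // (anchor from the old sequence, items inserted after it), in discovery order
    insertions: Vec<(String, Vec<String>)>,
}

// Returns None when there is no anchor to insert after, mirroring the branch
// in `go` that falls back to replacing the whole parent node.
fn diff_children(old: &[&str], new: &[&str]) -> Option<SeqDiff> {
    let mut diff = SeqDiff::default();
    let mut old_iter = old.iter();
    let mut new_iter = new.iter();
    let mut last_old: Option<&str> = None;
    loop {
        let old_item = old_iter.next().copied();
        match (old_item, new_iter.next().copied()) {
            (None, None) => break,
            // Extra item on the new side: record it as an insertion after the anchor.
            (None, Some(item)) => match last_old {
                Some(anchor) => match diff.insertions.last_mut() {
                    Some((a, items)) if a.as_str() == anchor => items.push(item.to_string()),
                    _ => diff.insertions.push((anchor.to_string(), vec![item.to_string()])),
                },
                None => return None,
            },
            // Extra item on the old side: it was deleted.
            (Some(item), None) => diff.deletions.push(item.to_string()),
            // Equal items need no edit.
            (Some(o), Some(n)) if o == n => {}
            // Differing items: the real `go` recurses into child nodes here and
            // only bottoms out in a replacement; flat items are replaced directly.
            (Some(o), Some(n)) => diff.replacements.push((o.to_string(), n.to_string())),
        }
        last_old = old_item.or(last_old);
    }
    Some(diff)
}

fn main() {
    let old = ["fn foo() {}", "fn bar() {}"];
    let new = ["fn foo() {}", "fn baz() {}", "fn quux() {}"];
    let diff = diff_children(&old, &new).unwrap();
    // replacements: [("fn bar() {}", "fn baz() {}")]
    // insertions:   [("fn bar() {}", ["fn quux() {}"])]
    // deletions:    []
    println!("{:#?}", diff);
}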