mirror of
https://github.com/rust-lang/rust-analyzer
synced 2025-01-27 20:35:09 +00:00
Fix accidentally quadratic behavior when processing includes
closes #3927
This commit is contained in:
parent
5e2f29af85
commit
428d4dfe71
1 changed files with 19 additions and 17 deletions
|
@ -137,21 +137,23 @@ impl TokenMap {
|
||||||
token_id: tt::TokenId,
|
token_id: tt::TokenId,
|
||||||
open_relative_range: TextRange,
|
open_relative_range: TextRange,
|
||||||
close_relative_range: TextRange,
|
close_relative_range: TextRange,
|
||||||
) {
|
) -> usize {
|
||||||
|
let res = self.entries.len();
|
||||||
self.entries
|
self.entries
|
||||||
.push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
|
.push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
|
||||||
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
|
fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
|
||||||
if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
|
let (_, token_text_range) = &mut self.entries[idx];
|
||||||
if let TokenTextRange::Delimiter(dim, _) = entry.1 {
|
if let TokenTextRange::Delimiter(dim, _) = token_text_range {
|
||||||
entry.1 = TokenTextRange::Delimiter(dim, close_relative_range);
|
*token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remove_delim(&mut self, token_id: tt::TokenId) {
|
fn remove_delim(&mut self, idx: usize) {
|
||||||
self.entries.retain(|(tid, _)| *tid != token_id);
|
// FIXME: This could be accidently quadratic
|
||||||
|
self.entries.remove(idx);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -238,24 +240,24 @@ impl TokenIdAlloc {
|
||||||
token_id
|
token_id
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Allocates a token id for an opening delimiter and records a provisional
/// map entry for it.
///
/// Returns the new id together with the entry's index in the map, so the
/// matching `close_delim` call can update or remove it in O(1) rather than
/// re-searching by token id.
fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
    let token_id = tt::TokenId(self.next_id);
    self.next_id += 1;
    // Until the real close token is seen, both halves of the delimiter
    // point at the opening range (`TextRange` is `Copy`, so one binding
    // serves for both arguments).
    let relative_range = open_abs_range - self.global_offset;
    let idx = self.map.insert_delim(token_id, relative_range, relative_range);
    (token_id, idx)
}
|
||||||
|
|
||||||
fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) {
|
fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
|
||||||
match close_abs_range {
|
match close_abs_range {
|
||||||
None => {
|
None => {
|
||||||
self.map.remove_delim(id);
|
self.map.remove_delim(idx);
|
||||||
}
|
}
|
||||||
Some(close) => {
|
Some(close) => {
|
||||||
self.map.update_close_delim(id, close - self.global_offset);
|
self.map.update_close_delim(idx, close - self.global_offset);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -322,7 +324,7 @@ trait TokenConvertor {
|
||||||
|
|
||||||
if let Some((kind, closed)) = delim {
|
if let Some((kind, closed)) = delim {
|
||||||
let mut subtree = tt::Subtree::default();
|
let mut subtree = tt::Subtree::default();
|
||||||
let id = self.id_alloc().open_delim(range);
|
let (id, idx) = self.id_alloc().open_delim(range);
|
||||||
subtree.delimiter = Some(tt::Delimiter { kind, id });
|
subtree.delimiter = Some(tt::Delimiter { kind, id });
|
||||||
|
|
||||||
while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
|
while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
|
||||||
|
@ -331,7 +333,7 @@ trait TokenConvertor {
|
||||||
let last_range = match self.bump() {
|
let last_range = match self.bump() {
|
||||||
None => {
|
None => {
|
||||||
// For error resilience, we insert an char punct for the opening delim here
|
// For error resilience, we insert an char punct for the opening delim here
|
||||||
self.id_alloc().close_delim(id, None);
|
self.id_alloc().close_delim(idx, None);
|
||||||
let leaf: tt::Leaf = tt::Punct {
|
let leaf: tt::Leaf = tt::Punct {
|
||||||
id: self.id_alloc().alloc(range),
|
id: self.id_alloc().alloc(range),
|
||||||
char: token.to_char().unwrap(),
|
char: token.to_char().unwrap(),
|
||||||
|
@ -344,7 +346,7 @@ trait TokenConvertor {
|
||||||
}
|
}
|
||||||
Some(it) => it.1,
|
Some(it) => it.1,
|
||||||
};
|
};
|
||||||
self.id_alloc().close_delim(id, Some(last_range));
|
self.id_alloc().close_delim(idx, Some(last_range));
|
||||||
subtree.into()
|
subtree.into()
|
||||||
} else {
|
} else {
|
||||||
let spacing = match self.peek() {
|
let spacing = match self.peek() {
|
||||||
|
|
Loading…
Reference in a new issue