diff --git a/crates/ra_hir_ty/src/tests/macros.rs b/crates/ra_hir_ty/src/tests/macros.rs index ff4599b712..f2a9b1c408 100644 --- a/crates/ra_hir_ty/src/tests/macros.rs +++ b/crates/ra_hir_ty/src/tests/macros.rs @@ -1,10 +1,13 @@ +use std::fs; + use insta::assert_snapshot; use ra_db::fixture::WithFixture; - -use super::{infer, type_at, type_at_pos}; +use test_utils::project_dir; use crate::test_db::TestDB; +use super::{infer, type_at, type_at_pos}; + #[test] fn cfg_impl_def() { let (db, pos) = TestDB::with_position( @@ -481,6 +484,30 @@ fn bar() -> u32 {0} assert_eq!("u32", type_at_pos(&db, pos)); } +#[test] +#[ignore] +fn include_accidentally_quadratic() { + let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic"); + let big_file = fs::read_to_string(file).unwrap(); + let big_file = vec![big_file; 10].join("\n"); + + let fixture = r#" +//- /main.rs +#[rustc_builtin_macro] +macro_rules! include {() => {}} + +include!("foo.rs"); + +fn main() { + RegisterBlock { }<|>; +} + "#; + let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file); + + let (db, pos) = TestDB::with_position(&fixture); + assert_eq!("RegisterBlock", type_at_pos(&db, pos)); +} + #[test] fn infer_builtin_macros_include_concat() { let (db, pos) = TestDB::with_position( diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 8e8ae2b294..9fb5cb0583 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs @@ -137,21 +137,23 @@ impl TokenMap { token_id: tt::TokenId, open_relative_range: TextRange, close_relative_range: TextRange, - ) { + ) -> usize { + let res = self.entries.len(); self.entries .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range))); + res } - fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) { - if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) { - if let TokenTextRange::Delimiter(dim, _) = 
entry.1 { - entry.1 = TokenTextRange::Delimiter(dim, close_relative_range); - } + fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { + let (_, token_text_range) = &mut self.entries[idx]; + if let TokenTextRange::Delimiter(dim, _) = token_text_range { + *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range); } } - fn remove_delim(&mut self, token_id: tt::TokenId) { - self.entries.retain(|(tid, _)| *tid != token_id); + fn remove_delim(&mut self, idx: usize) { + // FIXME: This could be accidentally quadratic + self.entries.remove(idx); } } @@ -238,24 +240,24 @@ impl TokenIdAlloc { token_id } - fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId { + fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) { let token_id = tt::TokenId(self.next_id); self.next_id += 1; - self.map.insert_delim( + let idx = self.map.insert_delim( token_id, open_abs_range - self.global_offset, open_abs_range - self.global_offset, ); - token_id + (token_id, idx) } - fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) { + fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) { match close_abs_range { None => { - self.map.remove_delim(id); + self.map.remove_delim(idx); } Some(close) => { - self.map.update_close_delim(id, close - self.global_offset); + self.map.update_close_delim(idx, close - self.global_offset); } } } @@ -322,7 +324,7 @@ trait TokenConvertor { if let Some((kind, closed)) = delim { let mut subtree = tt::Subtree::default(); - let id = self.id_alloc().open_delim(range); + let (id, idx) = self.id_alloc().open_delim(range); subtree.delimiter = Some(tt::Delimiter { kind, id }); while self.peek().map(|it| it.kind() != closed).unwrap_or(false) { let last_range = match self.bump() { None => { // For error resilience, we insert an char punct for the opening delim here - self.id_alloc().close_delim(id, None); + 
self.id_alloc().close_delim(idx, None); let leaf: tt::Leaf = tt::Punct { id: self.id_alloc().alloc(range), char: token.to_char().unwrap(), @@ -344,7 +346,7 @@ trait TokenConvertor { } Some(it) => it.1, }; - self.id_alloc().close_delim(id, Some(last_range)); + self.id_alloc().close_delim(idx, Some(last_range)); subtree.into() } else { let spacing = match self.peek() { diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs index 00ea3a9b06..2d4f68f5e0 100644 --- a/crates/ra_prof/src/lib.rs +++ b/crates/ra_prof/src/lib.rs @@ -113,21 +113,6 @@ pub fn profile(label: Label) -> Profiler { }) } -pub fn print_time(label: Label) -> impl Drop { - struct Guard { - label: Label, - start: Instant, - } - - impl Drop for Guard { - fn drop(&mut self) { - eprintln!("{}: {:?}", self.label, self.start.elapsed()) - } - } - - Guard { label, start: Instant::now() } -} - pub struct Profiler { label: Option