3933: Fix accidentally quadratic behavior when processing include! r=matklad a=matklad

This fixes the immediate problem behind #3927. It doesn't yet fix the deeper problem with `to_node` being quadratic (hence the test is ignored), but it is a good start anyway.

bors r+

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2020-04-10 19:16:32 +00:00 committed by GitHub
commit ca9a5dd165
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 63 additions and 35 deletions

View file

@ -1,10 +1,13 @@
use std::fs;
use insta::assert_snapshot;
use ra_db::fixture::WithFixture;
use super::{infer, type_at, type_at_pos};
use test_utils::project_dir;
use crate::test_db::TestDB;
use super::{infer, type_at, type_at_pos};
#[test]
fn cfg_impl_def() {
let (db, pos) = TestDB::with_position(
@ -481,6 +484,30 @@ fn bar() -> u32 {0}
assert_eq!("u32", type_at_pos(&db, pos));
}
#[test]
// Ignored because the deeper `to_node` quadratic problem is not fixed yet
// (see commit message); the test is still too slow to run by default.
#[ignore]
fn include_accidentally_quadratic() {
    // Regression test: load a large real-world file, inflate it 10x, and
    // check that type inference through `include!` still completes.
    let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic");
    let big_file = fs::read_to_string(file).unwrap();
    let big_file = vec![big_file; 10].join("\n");
    let fixture = r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
include!("foo.rs");
fn main() {
    RegisterBlock { }<|>;
}
"#;
    // Append the inflated contents as the file that `include!` pulls in.
    let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file);
    let (db, pos) = TestDB::with_position(&fixture);
    // `RegisterBlock` is defined in the included file; inferring its type at
    // the cursor proves the include was processed.
    assert_eq!("RegisterBlock", type_at_pos(&db, pos));
}
#[test]
fn infer_builtin_macros_include_concat() {
let (db, pos) = TestDB::with_position(

View file

@ -137,21 +137,23 @@ impl TokenMap {
token_id: tt::TokenId,
open_relative_range: TextRange,
close_relative_range: TextRange,
) {
) -> usize {
let res = self.entries.len();
self.entries
.push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
res
}
fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
if let TokenTextRange::Delimiter(dim, _) = entry.1 {
entry.1 = TokenTextRange::Delimiter(dim, close_relative_range);
}
/// Records the close range of the delimiter stored at `entries[idx]`.
///
/// `idx` is the index returned by `insert_delim`, so the entry is found by
/// direct indexing instead of the linear scan by token id that the previous
/// version performed (the source of the quadratic behavior).
fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
    let (_, token_text_range) = &mut self.entries[idx];
    // Only delimiter entries carry a close range; other variants are left as-is.
    if let TokenTextRange::Delimiter(dim, _) = token_text_range {
        *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
    }
}
fn remove_delim(&mut self, token_id: tt::TokenId) {
self.entries.retain(|(tid, _)| *tid != token_id);
/// Removes the delimiter entry at `idx` (the index returned by `insert_delim`).
fn remove_delim(&mut self, idx: usize) {
    // FIXME: This could be accidentally quadratic: `Vec::remove` shifts all
    // later entries (O(n)), so removing many delimiters is O(n^2) overall.
    self.entries.remove(idx);
}
}
@ -238,24 +240,24 @@ impl TokenIdAlloc {
token_id
}
fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
let token_id = tt::TokenId(self.next_id);
self.next_id += 1;
self.map.insert_delim(
let idx = self.map.insert_delim(
token_id,
open_abs_range - self.global_offset,
open_abs_range - self.global_offset,
);
token_id
(token_id, idx)
}
fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) {
fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
match close_abs_range {
None => {
self.map.remove_delim(id);
self.map.remove_delim(idx);
}
Some(close) => {
self.map.update_close_delim(id, close - self.global_offset);
self.map.update_close_delim(idx, close - self.global_offset);
}
}
}
@ -322,7 +324,7 @@ trait TokenConvertor {
if let Some((kind, closed)) = delim {
let mut subtree = tt::Subtree::default();
let id = self.id_alloc().open_delim(range);
let (id, idx) = self.id_alloc().open_delim(range);
subtree.delimiter = Some(tt::Delimiter { kind, id });
while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
@ -331,7 +333,7 @@ trait TokenConvertor {
let last_range = match self.bump() {
None => {
// For error resilience, we insert an char punct for the opening delim here
self.id_alloc().close_delim(id, None);
self.id_alloc().close_delim(idx, None);
let leaf: tt::Leaf = tt::Punct {
id: self.id_alloc().alloc(range),
char: token.to_char().unwrap(),
@ -344,7 +346,7 @@ trait TokenConvertor {
}
Some(it) => it.1,
};
self.id_alloc().close_delim(id, Some(last_range));
self.id_alloc().close_delim(idx, Some(last_range));
subtree.into()
} else {
let spacing = match self.peek() {

View file

@ -113,21 +113,6 @@ pub fn profile(label: Label) -> Profiler {
})
}
/// Returns a guard that, when dropped, prints `label` and the time elapsed
/// since this call to stderr. Useful for ad-hoc timing of a scope.
pub fn print_time(label: Label) -> impl Drop {
    struct Printer {
        label: Label,
        started: Instant,
    }

    impl Drop for Printer {
        fn drop(&mut self) {
            eprintln!("{}: {:?}", self.label, self.started.elapsed())
        }
    }

    Printer { label, started: Instant::now() }
}
pub struct Profiler {
label: Option<Label>,
detail: Option<String>,

View file

@ -1,6 +1,6 @@
//! Missing batteries for standard libraries.
use std::{cell::Cell, fmt};
use std::{cell::Cell, fmt, time::Instant};
#[inline(always)]
pub fn is_ci() -> bool {
@ -88,3 +88,17 @@ where
Ok(())
}
}
/// Scope timer: captures `Instant::now()` immediately and, when the returned
/// value goes out of scope, prints `label` plus the elapsed duration to stderr.
pub fn timeit(label: &'static str) -> impl Drop {
    struct Timer {
        label: &'static str,
        started: Instant,
    }

    impl Drop for Timer {
        fn drop(&mut self) {
            eprintln!("{}: {:?}", self.label, self.started.elapsed())
        }
    }

    Timer { label, started: Instant::now() }
}