Add source root

Aleksey Kladov 2018-09-02 20:08:58 +03:00
parent e98d8cd255
commit 440dc41dd8
5 changed files with 167 additions and 132 deletions

View file

@@ -4,13 +4,9 @@ use std::{
         atomic::{AtomicBool, Ordering::SeqCst},
     },
     fmt,
-    time::Instant,
-    collections::{HashMap, HashSet, VecDeque},
-    panic,
+    collections::{HashSet, VecDeque},
 };
-use rayon::prelude::*;
-use once_cell::sync::OnceCell;
 use libeditor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit};
 use libsyntax2::{
     TextUnit, TextRange, SmolStr, File, AstNode,
@@ -20,10 +16,9 @@ use libsyntax2::{
 use {
     FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
-    module_map::Problem,
-    symbol_index::FileSymbols,
-    module_map::{ModuleMap, ChangeKind},
     JobToken, CrateGraph, CrateId,
+    module_map::Problem,
+    roots::SourceRoot,
 };
 #[derive(Debug)]
@@ -50,23 +45,7 @@ impl AnalysisHostImpl {
     pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) {
         let data = self.data_mut();
         for (file_id, text) in changes {
-            let change_kind = if data.file_map.remove(&file_id).is_some() {
-                if text.is_some() {
-                    ChangeKind::Update
-                } else {
-                    ChangeKind::Delete
-                }
-            } else {
-                ChangeKind::Insert
-            };
-            data.module_map.update_file(file_id, change_kind);
-            data.file_map.remove(&file_id);
-            if let Some(text) = text {
-                let file_data = FileData::new(text);
-                data.file_map.insert(file_id, Arc::new(file_data));
-            } else {
-                data.file_map.remove(&file_id);
-            }
+            data.root.update(file_id, text);
         }
     }
     pub fn set_crate_graph(&mut self, graph: CrateGraph) {
@@ -106,31 +85,18 @@ impl Clone for AnalysisImpl {
 }
 impl AnalysisImpl {
-    pub fn file_syntax(&self, file_id: FileId) -> File {
-        self.file_data(file_id).syntax().clone()
+    pub fn file_syntax(&self, file_id: FileId) -> &File {
+        self.data.root.syntax(file_id)
     }
-    pub fn file_line_index(&self, id: FileId) -> LineIndex {
-        let data = self.file_data(id);
-        data
-            .lines
-            .get_or_init(|| LineIndex::new(&data.text))
-            .clone()
+    pub fn file_line_index(&self, file_id: FileId) -> &LineIndex {
+        self.data.root.lines(file_id)
     }
-    pub fn world_symbols(&self, mut query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
+    pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
         self.reindex();
-        self.data.file_map.iter()
-            .take_while(move |_| !token.is_canceled())
-            .flat_map(move |(id, data)| {
-                let symbols = data.symbols();
-                query.process(symbols).into_iter().map(move |s| (*id, s))
-            })
-            .collect()
+        query.search(&self.data.root.symbols(), token)
     }
     pub fn parent_module(&self, id: FileId) -> Vec<(FileId, FileSymbol)> {
-        let module_map = &self.data.module_map;
+        let module_map = self.data.root.module_map();
         let id = module_map.file2module(id);
         module_map
             .parent_modules(
@@ -152,7 +118,7 @@ impl AnalysisImpl {
     }
     pub fn crate_for(&self, id: FileId) -> Vec<CrateId> {
-        let module_map = &self.data.module_map;
+        let module_map = self.data.root.module_map();
         let crate_graph = &self.data.crate_graph;
         let mut res = Vec::new();
         let mut work = VecDeque::new();
@@ -222,7 +188,7 @@ impl AnalysisImpl {
             .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
             .collect::<Vec<_>>();
-        self.data.module_map.problems(
+        self.data.root.module_map().problems(
             file_id,
             &*self.file_resolver,
             &|file_id| self.file_syntax(file_id),
@@ -296,7 +262,7 @@ impl AnalysisImpl {
             Some(name) => name.text(),
             None => return Vec::new(),
         };
-        let module_map = &self.data.module_map;
+        let module_map = self.data.root.module_map();
        let id = module_map.file2module(id);
         module_map
             .child_module_by_name(
@@ -311,19 +277,7 @@ impl AnalysisImpl {
     fn reindex(&self) {
         if self.needs_reindex.compare_and_swap(true, false, SeqCst) {
-            let now = Instant::now();
-            let data = &*self.data;
-            data.file_map
-                .par_iter()
-                .for_each(|(_, data)| drop(data.symbols()));
-            info!("parallel indexing took {:?}", now.elapsed());
-        }
-    }
-    fn file_data(&self, file_id: FileId) -> Arc<FileData> {
-        match self.data.file_map.get(&file_id) {
-            Some(data) => data.clone(),
-            None => panic!("unknown file: {:?}", file_id),
+            self.data.root.reindex();
         }
     }
 }
@@ -331,50 +285,7 @@ impl AnalysisImpl {
 #[derive(Clone, Default, Debug)]
 struct WorldData {
     crate_graph: CrateGraph,
-    file_map: HashMap<FileId, Arc<FileData>>,
-    module_map: ModuleMap,
-}
-#[derive(Debug)]
-struct FileData {
-    text: String,
-    symbols: OnceCell<FileSymbols>,
-    syntax: OnceCell<File>,
-    lines: OnceCell<LineIndex>,
-}
-impl FileData {
-    fn new(text: String) -> FileData {
-        FileData {
-            text,
-            symbols: OnceCell::new(),
-            syntax: OnceCell::new(),
-            lines: OnceCell::new(),
-        }
-    }
-    fn syntax(&self) -> &File {
-        let text = &self.text;
-        let syntax = &self.syntax;
-        match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
-            Ok(file) => file,
-            Err(err) => {
-                error!("Parser paniced on:\n------\n{}\n------\n", &self.text);
-                panic::resume_unwind(err)
-            }
-        }
-    }
-    fn syntax_transient(&self) -> File {
-        self.syntax.get().map(|s| s.clone())
-            .unwrap_or_else(|| File::parse(&self.text))
-    }
-    fn symbols(&self) -> &FileSymbols {
-        let syntax = self.syntax_transient();
-        self.symbols
-            .get_or_init(|| FileSymbols::new(&syntax))
-    }
+    root: SourceRoot,
 }
 impl SourceChange {
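
The reworked WorldData now holds just the crate graph and a SourceRoot; both still derive Clone, and file data stays behind Arc, which reads as a deliberate choice to keep cloning an analysis snapshot cheap while clones share the lazily computed per-file state. A minimal standalone sketch of that Arc-backed cheap-clone idea (Snapshot and its field are hypothetical illustration names, not this crate's types):

    // Cloning the snapshot copies only the map of Arc pointers, not the file contents.
    use std::{collections::HashMap, sync::Arc};

    #[derive(Clone, Default, Debug)]
    struct Snapshot {
        files: HashMap<u32, Arc<String>>, // Arc: clones share the same file text
    }

    fn main() {
        let mut a = Snapshot::default();
        a.files.insert(1, Arc::new("fn main() {}".to_string()));
        let b = a.clone(); // O(number of files), not O(total bytes)
        assert!(Arc::ptr_eq(&a.files[&1], &b.files[&1])); // same allocation is shared
    }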

View file

@@ -14,6 +14,7 @@ mod symbol_index;
 mod module_map;
 mod imp;
 mod job;
+mod roots;
 use std::{
     sync::Arc,
@@ -146,10 +147,10 @@ pub struct Analysis {
 impl Analysis {
     pub fn file_syntax(&self, file_id: FileId) -> File {
-        self.imp.file_syntax(file_id)
+        self.imp.file_syntax(file_id).clone()
     }
     pub fn file_line_index(&self, file_id: FileId) -> LineIndex {
-        self.imp.file_line_index(file_id)
+        self.imp.file_line_index(file_id).clone()
     }
     pub fn extend_selection(&self, file: &File, range: TextRange) -> TextRange {
         libeditor::extend_selection(file, range).unwrap_or(range)
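
On the public side, Analysis keeps returning owned values by cloning whatever the internal AnalysisImpl now hands out by reference, so callers of the library API are unaffected by the borrow-based internals. A tiny sketch of that clone-at-the-boundary pattern, with hypothetical Inner/Outer types standing in for the impl/facade pair:

    // The inner type lends out references into its own storage; the thin public
    // wrapper clones at the API edge so its return values own their data.
    struct Inner { text: String }

    impl Inner {
        fn text(&self) -> &str { &self.text } // cheap, borrows self
    }

    struct Outer { imp: Inner }

    impl Outer {
        fn text(&self) -> String { self.imp.text().to_string() } // clone at the edge
    }

    fn main() {
        let api = Outer { imp: Inner { text: "hello".to_string() } };
        assert_eq!(api.text(), "hello");
    }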

View file

@@ -8,7 +8,7 @@ use libsyntax2::{
 };
 use {FileId, FileResolver};
-type SyntaxProvider<'a> = dyn Fn(FileId) -> File + 'a;
+type SyntaxProvider<'a> = dyn Fn(FileId) -> &'a File + 'a;
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 pub struct ModuleId(FileId);

View file

@@ -0,0 +1,113 @@
+use std::{
+    collections::HashMap,
+    time::Instant,
+    sync::Arc,
+    panic,
+};
+use once_cell::sync::OnceCell;
+use rayon::prelude::*;
+use libeditor::LineIndex;
+use libsyntax2::File;
+use {
+    FileId,
+    module_map::{ModuleMap, ChangeKind},
+    symbol_index::FileSymbols,
+};
+#[derive(Clone, Default, Debug)]
+pub(crate) struct SourceRoot {
+    file_map: HashMap<FileId, Arc<FileData>>,
+    module_map: ModuleMap,
+}
+impl SourceRoot {
+    pub fn update(&mut self, file_id: FileId, text: Option<String>) {
+        let change_kind = if self.file_map.remove(&file_id).is_some() {
+            if text.is_some() {
+                ChangeKind::Update
+            } else {
+                ChangeKind::Delete
+            }
+        } else {
+            ChangeKind::Insert
+        };
+        self.module_map.update_file(file_id, change_kind);
+        self.file_map.remove(&file_id);
+        if let Some(text) = text {
+            let file_data = FileData::new(text);
+            self.file_map.insert(file_id, Arc::new(file_data));
+        } else {
+            self.file_map.remove(&file_id);
+        }
+    }
+    pub fn module_map(&self) -> &ModuleMap {
+        &self.module_map
+    }
+    pub fn lines(&self, file_id: FileId) -> &LineIndex {
+        let data = self.data(file_id);
+        data.lines.get_or_init(|| LineIndex::new(&data.text))
+    }
+    pub fn syntax(&self, file_id: FileId) -> &File {
+        let data = self.data(file_id);
+        let text = &data.text;
+        let syntax = &data.syntax;
+        match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
+            Ok(file) => file,
+            Err(err) => {
+                error!("Parser paniced on:\n------\n{}\n------\n", &data.text);
+                panic::resume_unwind(err)
+            }
+        }
+    }
+    pub(crate) fn symbols(&self) -> Vec<(FileId, &FileSymbols)> {
+        self.file_map
+            .iter()
+            .map(|(&file_id, data)| (file_id, data.symbols()))
+            .collect()
+    }
+    pub fn reindex(&self) {
+        let now = Instant::now();
+        self.file_map
+            .par_iter()
+            .for_each(|(_, data)| {
+                data.symbols();
+            });
+        info!("parallel indexing took {:?}", now.elapsed());
+    }
+    fn data(&self, file_id: FileId) -> &FileData {
+        match self.file_map.get(&file_id) {
+            Some(data) => data,
+            None => panic!("unknown file: {:?}", file_id),
+        }
+    }
+}
+#[derive(Debug)]
+struct FileData {
+    text: String,
+    lines: OnceCell<LineIndex>,
+    syntax: OnceCell<File>,
+    symbols: OnceCell<FileSymbols>,
+}
+impl FileData {
+    fn new(text: String) -> FileData {
+        FileData {
+            text,
+            symbols: OnceCell::new(),
+            syntax: OnceCell::new(),
+            lines: OnceCell::new(),
+        }
+    }
+    fn syntax_transient(&self) -> File {
+        self.syntax.get().map(|s| s.clone())
+            .unwrap_or_else(|| File::parse(&self.text))
+    }
+    fn symbols(&self) -> &FileSymbols {
+        let syntax = self.syntax_transient();
+        self.symbols.get_or_init(|| FileSymbols::new(&syntax))
+    }
+}
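
The new roots.rs computes line indices, syntax trees, and symbol indices lazily through once_cell::sync::OnceCell, and reindex() warms the symbol caches in parallel with rayon. A small self-contained sketch of that lazy-cache-plus-parallel-warm-up pattern (Document, word_count, and warm_up are illustrative names, not part of this crate):

    // Each value is computed at most once per Document; a rayon pass can warm
    // all caches up front so later lookups are cheap.
    use once_cell::sync::OnceCell;
    use rayon::prelude::*;

    struct Document {
        text: String,
        word_count: OnceCell<usize>, // computed on first access, then cached
    }

    impl Document {
        fn new(text: String) -> Document {
            Document { text, word_count: OnceCell::new() }
        }
        fn word_count(&self) -> usize {
            // First caller pays the cost; later callers read the cached value.
            *self.word_count.get_or_init(|| self.text.split_whitespace().count())
        }
    }

    // Analogue of SourceRoot::reindex: touch every cache in parallel.
    fn warm_up(docs: &[Document]) {
        docs.par_iter().for_each(|d| {
            d.word_count();
        });
    }

    fn main() {
        let docs = vec![
            Document::new("hello world".to_string()),
            Document::new("one two three".to_string()),
        ];
        warm_up(&docs);
        assert_eq!(docs[0].word_count(), 2);
    }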

View file

@@ -3,8 +3,8 @@ use libsyntax2::{
     File,
     SyntaxKind::{self, *},
 };
-use fst::{self, IntoStreamer, Streamer};
-use Query;
+use fst::{self, Streamer};
+use {Query, FileId, JobToken};
 #[derive(Debug)]
 pub(crate) struct FileSymbols {
@@ -32,35 +32,45 @@ impl FileSymbols {
 }
 impl Query {
-    pub(crate) fn process(
-        &mut self,
-        file: &FileSymbols,
-    ) -> Vec<FileSymbol> {
-        fn is_type(kind: SyntaxKind) -> bool {
-            match kind {
-                STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true,
-                _ => false,
-            }
+    pub(crate) fn search(
+        mut self,
+        indices: &[(FileId, &FileSymbols)],
+        token: &JobToken,
+    ) -> Vec<(FileId, FileSymbol)> {
+        let mut op = fst::map::OpBuilder::new();
+        for (_, file_symbols) in indices.iter() {
+            let automaton = fst::automaton::Subsequence::new(&self.lowercased);
+            op = op.add(file_symbols.map.search(automaton))
         }
-        let automaton = fst::automaton::Subsequence::new(&self.lowercased);
-        let mut stream = file.map.search(automaton).into_stream();
+        let mut stream = op.union();
         let mut res = Vec::new();
-        while let Some((_, idx)) = stream.next() {
-            if self.limit == 0 {
+        while let Some((_, indexed_values)) = stream.next() {
+            if self.limit == 0 || token.is_canceled() {
                 break;
             }
-            let idx = idx as usize;
-            let symbol = &file.symbols[idx];
-            if self.only_types && !is_type(symbol.kind) {
-                continue;
+            for indexed_value in indexed_values {
+                let (file_id, file_symbols) = &indices[indexed_value.index];
+                let idx = indexed_value.value as usize;
+                let symbol = &file_symbols.symbols[idx];
+                if self.only_types && !is_type(symbol.kind) {
+                    continue;
+                }
+                if self.exact && symbol.name != self.query {
+                    continue;
+                }
+                res.push((*file_id, symbol.clone()));
+                self.limit -= 1;
            }
-            if self.exact && symbol.name != self.query {
-                continue;
-            }
-            res.push(symbol.clone());
-            self.limit -= 1;
         }
         res
     }
 }
+fn is_type(kind: SyntaxKind) -> bool {
+    match kind {
+        STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true,
+        _ => false,
+    }
+}
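
Query::search above fans one fuzzy Subsequence automaton out over every file's fst map, unions the per-file streams, and uses each IndexedValue's index field to recover which map (and hence which file) a hit came from. A compact, self-contained sketch of that multi-map union technique with the fst crate (the toy maps and keys are made up; OpBuilder, union, and IndexedValue are the same API the diff relies on):

    // Search several fst maps with one automaton and tag every hit with the
    // position of the map it came from, mirroring the shape of Query::search.
    use fst::{Map, Streamer};
    use fst::automaton::Subsequence;

    fn main() {
        // One map per "file": key = lowercased symbol name, value = symbol index.
        let maps = vec![
            Map::from_iter(vec![("foo_bar", 0u64), ("foo_baz", 1)]).unwrap(),
            Map::from_iter(vec![("quux", 0u64)]).unwrap(),
        ];

        // Union the per-map search streams.
        let mut op = fst::map::OpBuilder::new();
        for m in &maps {
            op = op.add(m.search(Subsequence::new("fbz"))); // subsequence ("fuzzy") match
        }
        let mut stream = op.union();
        while let Some((key, indexed_values)) = stream.next() {
            for iv in indexed_values {
                // iv.index says which map matched; iv.value is that map's stored u64.
                println!(
                    "map #{} matched {:?} with value {}",
                    iv.index,
                    std::str::from_utf8(key).unwrap(),
                    iv.value,
                );
            }
        }
    }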