dispatch across roots

Aleksey Kladov 2018-09-03 19:46:30 +03:00
parent 2f2feef9af
commit b04c14d4ad
5 changed files with 180 additions and 75 deletions

View file

@@ -17,8 +17,8 @@ use libsyntax2::{
 use {
     FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
     JobToken, CrateGraph, CrateId,
-    module_map::Problem,
-    roots::SourceRoot,
+    module_map::{ModuleMap, Problem},
+    roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
 };
 #[derive(Debug)]
@@ -57,6 +57,10 @@ impl AnalysisHostImpl {
         }
         self.data_mut().crate_graph = graph;
     }
+    pub fn set_libraries(&mut self, libs: impl Iterator<Item=impl Iterator<Item=(FileId, String)>>) {
+        let libs = libs.map(ReadonlySourceRoot::new).collect::<Vec<_>>();
+        self.data_mut().libs = Arc::new(libs);
+    }
     fn data_mut(&mut self) -> &mut WorldData {
         Arc::make_mut(&mut self.data)
     }
@@ -85,19 +89,33 @@ impl Clone for AnalysisImpl {
 }
 impl AnalysisImpl {
+    fn root(&self, file_id: FileId) -> &SourceRoot {
+        if self.data.root.contains(file_id) {
+            return &self.data.root;
+        }
+        self.data.libs.iter().find(|it| it.contains(file_id)).unwrap()
+    }
     pub fn file_syntax(&self, file_id: FileId) -> &File {
-        self.data.root.syntax(file_id)
+        self.root(file_id).syntax(file_id)
     }
     pub fn file_line_index(&self, file_id: FileId) -> &LineIndex {
-        self.data.root.lines(file_id)
+        self.root(file_id).lines(file_id)
     }
     pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
         self.reindex();
-        query.search(&self.data.root.symbols(), token)
+        let mut buf = Vec::new();
+        if query.libs {
+            self.data.libs.iter()
+                .for_each(|it| it.symbols(&mut buf));
+        } else {
+            self.data.root.symbols(&mut buf);
+        }
+        query.search(&buf, token)
     }
-    pub fn parent_module(&self, id: FileId) -> Vec<(FileId, FileSymbol)> {
-        let module_map = self.data.root.module_map();
-        let id = module_map.file2module(id);
+    pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
+        let module_map = self.root(file_id).module_map();
+        let id = module_map.file2module(file_id);
         module_map
             .parent_modules(
                 id,
@@ -117,12 +135,12 @@ impl AnalysisImpl {
             .collect()
     }
-    pub fn crate_for(&self, id: FileId) -> Vec<CrateId> {
-        let module_map = self.data.root.module_map();
+    pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
+        let module_map = self.root(file_id).module_map();
         let crate_graph = &self.data.crate_graph;
         let mut res = Vec::new();
         let mut work = VecDeque::new();
-        work.push_back(id);
+        work.push_back(file_id);
         let mut visited = HashSet::new();
         while let Some(id) = work.pop_front() {
             if let Some(crate_id) = crate_graph.crate_id_for_crate_root(id) {
@@ -148,11 +166,13 @@ impl AnalysisImpl {
     }
     pub fn approximately_resolve_symbol(
         &self,
-        id: FileId,
+        file_id: FileId,
         offset: TextUnit,
         token: &JobToken,
     ) -> Vec<(FileId, FileSymbol)> {
-        let file = self.file_syntax(id);
+        let root = self.root(file_id);
+        let module_map = root.module_map();
+        let file = root.syntax(file_id);
         let syntax = file.syntax();
         if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
             return self.index_resolve(name_ref, token);
@@ -160,7 +180,7 @@ impl AnalysisImpl {
         if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) {
             if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
                 if module.has_semi() {
-                    let file_ids = self.resolve_module(id, module);
+                    let file_ids = self.resolve_module(module_map, file_id, module);
                     let res = file_ids.into_iter().map(|id| {
                         let name = module.name()
@@ -182,13 +202,16 @@ impl AnalysisImpl {
     }
     pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
-        let syntax = self.file_syntax(file_id);
+        let root = self.root(file_id);
+        let module_map = root.module_map();
+        let syntax = root.syntax(file_id);
         let mut res = libeditor::diagnostics(&syntax)
             .into_iter()
             .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
            .collect::<Vec<_>>();
-        self.data.root.module_map().problems(
+        module_map.problems(
            file_id,
            &*self.file_resolver,
            &|file_id| self.file_syntax(file_id),
@@ -257,13 +280,12 @@ impl AnalysisImpl {
         self.world_symbols(query, token)
     }
-    fn resolve_module(&self, id: FileId, module: ast::Module) -> Vec<FileId> {
+    fn resolve_module(&self, module_map: &ModuleMap, file_id: FileId, module: ast::Module) -> Vec<FileId> {
         let name = match module.name() {
            Some(name) => name.text(),
            None => return Vec::new(),
        };
-        let module_map = self.data.root.module_map();
-        let id = module_map.file2module(id);
+        let id = module_map.file2module(file_id);
         module_map
             .child_module_by_name(
                 id, name.as_str(),
@@ -285,7 +307,8 @@ impl AnalysisImpl {
 #[derive(Clone, Default, Debug)]
 struct WorldData {
     crate_graph: CrateGraph,
-    root: SourceRoot,
+    root: WritableSourceRoot,
+    libs: Arc<Vec<ReadonlySourceRoot>>,
 }
 impl SourceChange {

View file

@@ -68,6 +68,9 @@ impl AnalysisHost {
     pub fn set_crate_graph(&mut self, graph: CrateGraph) {
         self.imp.set_crate_graph(graph)
     }
+    pub fn set_libraries(&mut self, libs: impl Iterator<Item=impl Iterator<Item=(FileId, String)>>) {
+        self.imp.set_libraries(libs)
+    }
 }
 #[derive(Debug)]
@@ -114,6 +117,7 @@ pub struct Query {
     query: String,
     lowercased: String,
     only_types: bool,
+    libs: bool,
     exact: bool,
     limit: usize,
 }
@@ -125,6 +129,7 @@ impl Query {
             query,
             lowercased,
             only_types: false,
+            libs: false,
            exact: false,
            limit: usize::max_value()
        }
@@ -132,6 +137,9 @@ impl Query {
     pub fn only_types(&mut self) {
         self.only_types = true;
     }
+    pub fn libs(&mut self) {
+        self.libs = true;
+    }
     pub fn exact(&mut self) {
         self.exact = true;
     }
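A rough usage sketch for the public surface added in this file: registering a read-only library root via set_libraries and flagging a Query to search library symbols. AnalysisHost::new() and Query::new(String) are assumed constructors that this diff does not show, and the FileId layout and file contents are made up; treat the exact signatures as guesses.

// Sketch only: constructors and FileId layout are assumptions, not part of this diff.
let mut host = AnalysisHost::new();                 // assumed constructor
let lib_files = vec![
    (FileId(1), "pub fn from_lib() {}".to_string()),
];
// One inner iterator per library; each becomes a ReadonlySourceRoot.
host.set_libraries(vec![lib_files.into_iter()].into_iter());

let mut query = Query::new("from_lib".to_string()); // assumed constructor
query.libs();                                       // search library roots instead of the workspace root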

View file

@@ -56,6 +56,10 @@ pub enum Problem {
 }
 impl ModuleMap {
+    pub fn new() -> ModuleMap {
+        Default::default()
+    }
     pub fn update_file(&mut self, file: FileId, change_kind: ChangeKind) {
         self.state.get_mut().changes.push((file, change_kind));
     }

View file

@@ -13,16 +13,24 @@ use libsyntax2::File;
 use {
     FileId,
     module_map::{ModuleMap, ChangeKind},
-    symbol_index::FileSymbols,
+    symbol_index::SymbolIndex,
 };
+pub(crate) trait SourceRoot {
+    fn contains(&self, file_id: FileId) -> bool;
+    fn module_map(&self) -> &ModuleMap;
+    fn lines(&self, file_id: FileId) -> &LineIndex;
+    fn syntax(&self, file_id: FileId) -> &File;
+    fn symbols<'a>(&'a self, acc: &mut Vec<&'a SymbolIndex>);
+}
 #[derive(Clone, Default, Debug)]
-pub(crate) struct SourceRoot {
-    file_map: HashMap<FileId, Arc<(FileData, OnceCell<FileSymbols>)>>,
+pub(crate) struct WritableSourceRoot {
+    file_map: HashMap<FileId, Arc<(FileData, OnceCell<SymbolIndex>)>>,
     module_map: ModuleMap,
 }
-impl SourceRoot {
+impl WritableSourceRoot {
     pub fn update(&mut self, file_id: FileId, text: Option<String>) {
         let change_kind = if self.file_map.remove(&file_id).is_some() {
             if text.is_some() {
@@ -40,31 +48,6 @@ impl SourceRoot {
             self.file_map.insert(file_id, Arc::new((file_data, Default::default())));
         }
     }
-    pub fn module_map(&self) -> &ModuleMap {
-        &self.module_map
-    }
-    pub fn lines(&self, file_id: FileId) -> &LineIndex {
-        let data = self.data(file_id);
-        data.lines.get_or_init(|| LineIndex::new(&data.text))
-    }
-    pub fn syntax(&self, file_id: FileId) -> &File {
-        let data = self.data(file_id);
-        let text = &data.text;
-        let syntax = &data.syntax;
-        match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
-            Ok(file) => file,
-            Err(err) => {
-                error!("Parser paniced on:\n------\n{}\n------\n", &data.text);
-                panic::resume_unwind(err)
-            }
-        }
-    }
-    pub(crate) fn symbols(&self) -> Vec<&FileSymbols> {
-        self.file_map
-            .iter()
-            .map(|(&file_id, data)| symbols(file_id, data))
-            .collect()
-    }
     pub fn reindex(&self) {
         let now = Instant::now();
         self.file_map
@@ -83,9 +66,31 @@
     }
 }
-fn symbols(file_id: FileId, (data, symbols): &(FileData, OnceCell<FileSymbols>)) -> &FileSymbols {
+impl SourceRoot for WritableSourceRoot {
+    fn contains(&self, file_id: FileId) -> bool {
+        self.file_map.contains_key(&file_id)
+    }
+    fn module_map(&self) -> &ModuleMap {
+        &self.module_map
+    }
+    fn lines(&self, file_id: FileId) -> &LineIndex {
+        self.data(file_id).lines()
+    }
+    fn syntax(&self, file_id: FileId) -> &File {
+        self.data(file_id).syntax()
+    }
+    fn symbols<'a>(&'a self, acc: &mut Vec<&'a SymbolIndex>) {
+        acc.extend(
+            self.file_map
+                .iter()
+                .map(|(&file_id, data)| symbols(file_id, data))
+        )
+    }
+}
+fn symbols(file_id: FileId, (data, symbols): &(FileData, OnceCell<SymbolIndex>)) -> &SymbolIndex {
     let syntax = data.syntax_transient();
-    symbols.get_or_init(|| FileSymbols::new(file_id, &syntax))
+    symbols.get_or_init(|| SymbolIndex::for_file(file_id, syntax))
 }
 #[derive(Debug)]
@@ -103,19 +108,77 @@ impl FileData {
             lines: OnceCell::new(),
         }
     }
+    fn lines(&self) -> &LineIndex {
+        self.lines.get_or_init(|| LineIndex::new(&self.text))
+    }
+    fn syntax(&self) -> &File {
+        let text = &self.text;
+        let syntax = &self.syntax;
+        match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
+            Ok(file) => file,
+            Err(err) => {
+                error!("Parser paniced on:\n------\n{}\n------\n", text);
+                panic::resume_unwind(err)
+            }
+        }
+    }
     fn syntax_transient(&self) -> File {
         self.syntax.get().map(|s| s.clone())
             .unwrap_or_else(|| File::parse(&self.text))
     }
 }
-// #[derive(Clone, Default, Debug)]
-// pub(crate) struct ReadonlySourceRoot {
-//     data: Arc<ReadonlySourceRoot>
-// }
-// #[derive(Clone, Default, Debug)]
-// pub(crate) struct ReadonlySourceRootInner {
-//     file_map: HashMap<FileId, FileData>,
-//     module_map: ModuleMap,
-// }
+#[derive(Debug)]
+pub(crate) struct ReadonlySourceRoot {
+    symbol_index: SymbolIndex,
+    file_map: HashMap<FileId, FileData>,
+    module_map: ModuleMap,
+}
+impl ReadonlySourceRoot {
+    pub fn new(files: impl Iterator<Item=(FileId, String)>) -> ReadonlySourceRoot {
+        let mut module_map = ModuleMap::new();
+        let file_map: HashMap<FileId, FileData> = files
+            .map(|(id, text)| {
+                module_map.update_file(id, ChangeKind::Insert);
+                (id, FileData::new(text))
+            })
+            .collect();
+        let symbol_index = SymbolIndex::for_files(
+            file_map.par_iter().map(|(&file_id, file_data)| {
+                (file_id, file_data.syntax_transient())
+            })
+        );
+        ReadonlySourceRoot {
+            symbol_index,
+            file_map,
+            module_map,
+        }
+    }
+    fn data(&self, file_id: FileId) -> &FileData {
+        match self.file_map.get(&file_id) {
+            Some(data) => data,
+            None => panic!("unknown file: {:?}", file_id),
+        }
+    }
+}
+impl SourceRoot for ReadonlySourceRoot {
+    fn contains(&self, file_id: FileId) -> bool {
+        self.file_map.contains_key(&file_id)
+    }
+    fn module_map(&self) -> &ModuleMap {
+        &self.module_map
+    }
+    fn lines(&self, file_id: FileId) -> &LineIndex {
+        self.data(file_id).lines()
+    }
+    fn syntax(&self, file_id: FileId) -> &File {
+        self.data(file_id).syntax()
+    }
+    fn symbols<'a>(&'a self, acc: &mut Vec<&'a SymbolIndex>) {
+        acc.push(&self.symbol_index)
+    }
+}
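A self-contained sketch of the dispatch pattern this file enables: one writable workspace root plus any number of read-only library roots behind a common trait, with lookups routed to whichever root contains the file. The names here (WritableRoot, ReadonlyRoot, World, source_root) are simplified stand-ins rather than the crate's actual types, and the bodies are deliberately minimal.

// Toy model of "dispatch across roots"; not the crate's real implementation.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

// Reduced version of the SourceRoot trait above.
trait SourceRoot {
    fn contains(&self, file_id: FileId) -> bool;
    fn text(&self, file_id: FileId) -> &str;
}

#[derive(Default)]
struct WritableRoot { files: HashMap<FileId, String> }
struct ReadonlyRoot { files: HashMap<FileId, String> }

impl SourceRoot for WritableRoot {
    fn contains(&self, file_id: FileId) -> bool { self.files.contains_key(&file_id) }
    fn text(&self, file_id: FileId) -> &str { &self.files[&file_id] }
}
impl SourceRoot for ReadonlyRoot {
    fn contains(&self, file_id: FileId) -> bool { self.files.contains_key(&file_id) }
    fn text(&self, file_id: FileId) -> &str { &self.files[&file_id] }
}

struct World {
    root: WritableRoot,
    libs: Vec<ReadonlyRoot>,
}

impl World {
    // Same shape as AnalysisImpl::root in the first file of this commit:
    // prefer the writable workspace root, otherwise scan the library roots.
    fn source_root(&self, file_id: FileId) -> &dyn SourceRoot {
        if self.root.contains(file_id) {
            return &self.root;
        }
        self.libs
            .iter()
            .find(|it| it.contains(file_id))
            .map(|it| it as &dyn SourceRoot)
            .expect("unknown file")
    }
}

fn main() {
    let mut root = WritableRoot::default();
    root.files.insert(FileId(1), "fn main() {}".to_string());
    let lib = ReadonlyRoot {
        files: std::iter::once((FileId(2), "pub fn lib() {}".to_string())).collect(),
    };
    let world = World { root, libs: vec![lib] };
    // A file owned by a library is served by its read-only root.
    assert_eq!(world.source_root(FileId(2)).text(FileId(2)), "pub fn lib() {}");
}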

View file

@@ -4,39 +4,46 @@ use libsyntax2::{
     SyntaxKind::{self, *},
 };
 use fst::{self, Streamer};
+use rayon::prelude::*;
 use {Query, FileId, JobToken};
 #[derive(Debug)]
-pub(crate) struct FileSymbols {
+pub(crate) struct SymbolIndex {
     symbols: Vec<(FileId, FileSymbol)>,
     map: fst::Map,
 }
-impl FileSymbols {
-    pub(crate) fn new(file_id: FileId, file: &File) -> FileSymbols {
-        let mut symbols = file_symbols(file)
-            .into_iter()
-            .map(|s| (s.name.as_str().to_lowercase(), s))
+impl SymbolIndex {
+    pub(crate) fn for_files(files: impl ParallelIterator<Item=(FileId, File)>) -> SymbolIndex {
+        let mut symbols = files
+            .flat_map(|(file_id, file)| {
+                file_symbols(&file)
+                    .into_iter()
+                    .map(move |symbol| {
+                        (symbol.name.as_str().to_lowercase(), (file_id, symbol))
+                    })
+                    .collect::<Vec<_>>()
+            })
             .collect::<Vec<_>>();
-        symbols.sort_by(|s1, s2| s1.0.cmp(&s2.0));
+        symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0));
         symbols.dedup_by(|s1, s2| s1.0 == s2.0);
         let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) =
-            symbols.into_iter()
-                .map(|(name, symbol)| (name, (file_id, symbol)))
-                .unzip();
+            symbols.into_iter().unzip();
         let map = fst::Map::from_iter(
             names.into_iter().zip(0u64..)
         ).unwrap();
-        FileSymbols { symbols, map }
+        SymbolIndex { symbols, map }
+    }
+    pub(crate) fn for_file(file_id: FileId, file: File) -> SymbolIndex {
+        SymbolIndex::for_files(::rayon::iter::once((file_id, file)))
     }
 }
 impl Query {
     pub(crate) fn search(
         mut self,
-        indices: &[&FileSymbols],
+        indices: &[&SymbolIndex],
         token: &JobToken,
     ) -> Vec<(FileId, FileSymbol)> {