yet another db api

Aleksey Kladov 2018-09-15 17:21:47 +03:00
parent 0d7b1e442d
commit d59413c895
5 changed files with 276 additions and 260 deletions


@@ -0,0 +1,155 @@
use std::{
    sync::Arc,
    any::Any,
    hash::{Hash, Hasher},
    collections::hash_map::{DefaultHasher, HashMap},
    iter,
};
use salsa;
use {FileId, imp::FileResolverImp};
use super::{State, Query, QueryCtx};

pub(super) type Data = Arc<Any + Send + Sync + 'static>;

#[derive(Debug)]
pub(super) struct Db {
    names: Arc<HashMap<salsa::QueryTypeId, &'static str>>,
    pub(super) imp: salsa::Db<State, Data>,
}
impl Db {
    pub(super) fn new(mut reg: QueryRegistry) -> Db {
        let config = reg.config.take().unwrap();
        Db {
            names: Arc::new(reg.names),
            imp: salsa::Db::new(config, State::default())
        }
    }
    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
        let names = self.names.clone();
        let mut invalidations = salsa::Invalidations::new();
        invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
        if resolver_changed {
            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
        } else {
            invalidations.invalidate(FILE_SET, iter::empty());
        }
        let imp = self.imp.with_ground_data(
            new_state,
            invalidations,
        );
        Db { names, imp }
    }
    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
    }
}
pub(crate) trait EvalQuery {
    type Params;
    type Output;
    fn query_type(&self) -> salsa::QueryTypeId;
    fn f(&self) -> salsa::QueryFn<State, Data>;
    fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>;
}

impl<T, R> EvalQuery for Query<T, R>
where
    T: Hash + Send + Sync + 'static,
    R: Hash + Send + Sync + 'static,
{
    type Params = T;
    type Output = R;
    fn query_type(&self) -> salsa::QueryTypeId {
        salsa::QueryTypeId(self.0)
    }
    fn f(&self) -> salsa::QueryFn<State, Data> {
        let f = self.1;
        Box::new(move |ctx, data| {
            let ctx = QueryCtx { imp: ctx };
            let data: &T = data.downcast_ref().unwrap();
            let res = f(ctx, data);
            let h = hash(&res);
            (Arc::new(res), salsa::OutputFingerprint(h))
        })
    }
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
        let query_id = salsa::QueryId(
            self.query_type(),
            salsa::InputFingerprint(hash(&params)),
        );
        let res = ctx.imp.get(query_id, Arc::new(params));
        res.downcast().unwrap()
    }
}
pub(super) struct QueryRegistry {
    config: Option<salsa::QueryConfig<State, Data>>,
    names: HashMap<salsa::QueryTypeId, &'static str>,
}

impl QueryRegistry {
    pub(super) fn new() -> QueryRegistry {
        let mut config = salsa::QueryConfig::<State, Data>::new();
        config = config.with_ground_query(
            FILE_TEXT, Box::new(|state, params| {
                let file_id: &FileId = params.downcast_ref().unwrap();
                let res = state.file_map[file_id].clone();
                let fingerprint = salsa::OutputFingerprint(hash(&res));
                (res, fingerprint)
            })
        );
        config = config.with_ground_query(
            FILE_SET, Box::new(|state, _params| {
                let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
                let hash = hash(&file_ids);
                let file_resolver = state.file_resolver.clone();
                let res = (file_ids, file_resolver);
                let fingerprint = salsa::OutputFingerprint(hash);
                (Arc::new(res), fingerprint)
            })
        );
        let mut names = HashMap::new();
        names.insert(FILE_TEXT, "FILE_TEXT");
        names.insert(FILE_SET, "FILE_SET");
        QueryRegistry { config: Some(config), names }
    }
    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
        let id = q.query_type();
        let prev = self.names.insert(id, name);
        assert!(prev.is_none(), "duplicate query: {:?}", id);
        let config = self.config.take().unwrap();
        let config = config.with_query(id, q.f());
        self.config = Some(config);
    }
    pub(super) fn finish(mut self) -> salsa::QueryConfig<State, Data> {
        self.config.take().unwrap()
    }
}
fn hash<T: Hash>(x: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    x.hash(&mut hasher);
    hasher.finish()
}

const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
    let query_id = salsa::QueryId(
        FILE_TEXT,
        salsa::InputFingerprint(hash(&file_id)),
    );
    let res = ctx.imp.get(query_id, Arc::new(file_id));
    res.downcast().unwrap()
}

const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
    let query_id = salsa::QueryId(
        FILE_SET,
        salsa::InputFingerprint(hash(&())),
    );
    let res = ctx.imp.get(query_id, Arc::new(()));
    res.downcast().unwrap()
}
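
Everything a derived query needs is visible above: a Query(id, fn) value, a registration call that records its name, and EvalQuery::get to fetch the memoized result. Below is a minimal sketch of that wiring for a hypothetical WORD_COUNT query; the name, the id 17, and both helper functions are invented for illustration, and the sketch assumes the Query, QueryCtx, QueryRegistry and file_text items from db/mod.rs are in scope.

// Hypothetical derived query against the API above. The u16 id must be unique
// across the registry; params and output only need Hash + Send + Sync + 'static.
const WORD_COUNT: Query<FileId, usize> = Query(17, |ctx, file_id: &FileId| {
    let text = file_text(ctx, *file_id);
    text.split_whitespace().count()
});

fn register_word_count(reg: &mut QueryRegistry) {
    // `add` boxes the query function via EvalQuery::f and remembers the name
    // so extract_trace can report it later.
    reg.add(WORD_COUNT, "WORD_COUNT");
}

fn word_count(ctx: QueryCtx, file_id: FileId) -> usize {
    // EvalQuery::get fingerprints the params and asks salsa for a cached or
    // freshly computed value.
    *ctx.get(WORD_COUNT, file_id)
}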


@@ -1,195 +1,99 @@
mod queries;
mod imp;
use std::{
hash::{Hash},
sync::Arc,
fmt::Debug,
any::Any,
iter,
};
use im;
use salsa;
use {
FileId,
imp::{FileResolverImp},
};
use {FileId, imp::FileResolverImp};
#[derive(Clone, Default)]
#[derive(Debug, Default, Clone)]
pub(crate) struct State {
pub(crate) resolver: FileResolverImp,
pub(crate) file_map: im::HashMap<FileId, Arc<str>>,
}
type Data = Arc<Any + Send + Sync + 'static>;
pub(crate) struct QueryCtx<'a> {
inner: &'a salsa::QueryCtx<State, Data>
pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
pub(crate) file_resolver: FileResolverImp
}
#[derive(Debug)]
pub(crate) struct Db {
inner: salsa::Db<State, Data>
imp: imp::Db,
}
struct GroundQuery<T, R> {
id: u16,
f: fn(&State, &T) -> R,
h: fn(&R) -> u64,
#[derive(Clone, Copy)]
pub(crate) struct QueryCtx<'a> {
imp: &'a salsa::QueryCtx<State, imp::Data>,
}
pub(crate) struct Query<T, R> {
pub(crate) id: u16,
pub(crate) f: fn(QueryCtx, &T) -> R,
pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
pub(crate) struct QueryRegistry {
imp: imp::QueryRegistry,
}
impl Db {
pub(crate) fn new() -> Db {
let state = Default::default();
Db { inner: salsa::Db::new(query_config(), state) }
let reg = QueryRegistry::new();
Db { imp: imp::Db::new(reg.imp) }
}
pub(crate) fn state(&self) -> &State {
self.inner.ground_data()
self.imp.imp.ground_data()
}
pub(crate) fn with_state(
&self,
new_state: State,
updated_files: &[FileId],
file_set_changed: bool,
) -> Db {
let mut inv = salsa::Invalidations::new();
if file_set_changed {
inv.invalidate(
salsa::QueryTypeId(queries::FILE_SET.id),
iter::once(salsa::InputFingerprint(hash(&()))),
);
} else {
inv.invalidate(
salsa::QueryTypeId(queries::FILE_SET.id),
iter::empty(),
);
}
inv.invalidate(
salsa::QueryTypeId(queries::FILE_TEXT.id),
updated_files.iter().map(hash).map(salsa::InputFingerprint),
);
Db { inner: self.inner.with_ground_data(new_state, inv) }
pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
}
pub(crate) fn get<T, R>(&self, q: Query<T, R>, params: T) -> (Arc<R>, Vec<u16>)
where
T: Hash + Send + Sync + 'static,
R: Send + Sync + 'static,
{
let query_id = salsa::QueryId(
salsa::QueryTypeId(q.id),
salsa::InputFingerprint(hash(&params)),
);
let params = Arc::new(params);
let (res, events) = self.inner.get(query_id, params);
let res = res.downcast().unwrap();
let events = events.into_iter().map(|it| it.0).collect();
(res, events)
pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
f(ctx)
}
pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
let res = f(ctx);
let trace = self.imp.extract_trace(ctx.imp);
(res, trace)
}
}
impl<'a> QueryCtx<'a> {
fn get_g<T, R>(&self, q: GroundQuery<T, R>, params: T) -> Arc<R>
where
T: Hash + Send + Sync + 'static,
R: Send + Sync + 'static,
{
let query_id = salsa::QueryId(
salsa::QueryTypeId(q.id),
salsa::InputFingerprint(hash(&params)),
);
let res = self.inner.get(query_id, Arc::new(params));
res.downcast().unwrap()
}
pub(crate) fn get<T, R>(&self, q: Query<T, R>, params: T) -> Arc<R>
where
T: Hash + Send + Sync + 'static,
R: Send + Sync + 'static,
{
let query_id = salsa::QueryId(
salsa::QueryTypeId(q.id),
salsa::InputFingerprint(hash(&params)),
);
let res = self.inner.get(query_id, Arc::new(params));
res.downcast().unwrap()
pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
q.get(self, params)
}
}
fn query_config() -> salsa::QueryConfig<State, Data> {
let mut res = salsa::QueryConfig::new();
let queries: Vec<BoxedGroundQuery> = vec![
queries::FILE_TEXT.into(),
queries::FILE_SET.into(),
];
for q in queries {
res = res.with_ground_query(q.query_type, q.f)
}
let mut queries: Vec<BoxedQuery> = vec![
queries::FILE_SYNTAX.into(),
];
::module_map_db::queries(&mut queries);
for q in queries {
res = res.with_query(q.query_type, q.f);
}
res
pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
imp::file_text(ctx, file_id)
}
struct BoxedGroundQuery {
query_type: salsa::QueryTypeId,
f: Box<Fn(&State, &Data) -> (Data, salsa::OutputFingerprint) + Send + Sync + 'static>,
pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
imp::file_set(ctx)
}
pub(crate) use self::queries::file_syntax;
mod queries {
use libsyntax2::File;
use {FileId};
use super::{Query, QueryCtx, QueryRegistry, file_text};
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
reg.add(FILE_SYNTAX, "FILE_SYNTAX")
}
pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
(&*ctx.get(FILE_SYNTAX, file_id)).clone()
}
pub(super) const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
let text = file_text(ctx, *file_id);
File::parse(&*text)
});
}
impl<T, R> From<GroundQuery<T, R>> for BoxedGroundQuery
where
T: Send + Sync + 'static,
R: Send + Sync + 'static,
{
fn from(q: GroundQuery<T, R>) -> BoxedGroundQuery
{
BoxedGroundQuery {
query_type: salsa::QueryTypeId(q.id),
f: Box::new(move |state, data| {
let data: &T = data.downcast_ref().unwrap();
let res = (q.f)(state, data);
let h = (q.h)(&res);
(Arc::new(res), salsa::OutputFingerprint(h))
})
}
impl QueryRegistry {
fn new() -> QueryRegistry {
let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
queries::register_queries(&mut reg);
::module_map_db::register_queries(&mut reg);
reg
}
pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
self.imp.add(q, name)
}
}
pub(crate) struct BoxedQuery {
query_type: salsa::QueryTypeId,
f: Box<Fn(&salsa::QueryCtx<State, Data>, &Data) -> (Data, salsa::OutputFingerprint) + Send + Sync + 'static>,
}
impl<T, R> From<Query<T, R>> for BoxedQuery
where
T: Hash + Send + Sync + 'static,
R: Hash + Send + Sync + 'static,
{
fn from(q: Query<T, R>) -> BoxedQuery
{
BoxedQuery {
query_type: salsa::QueryTypeId(q.id),
f: Box::new(move |ctx, data| {
let ctx = QueryCtx { inner: ctx };
let data: &T = data.downcast_ref().unwrap();
let res = (q.f)(ctx, data);
let h = hash(&res);
(Arc::new(res), salsa::OutputFingerprint(h))
})
}
}
}
fn hash<T: ::std::hash::Hash>(x: &T) -> u64 {
use std::hash::Hasher;
let mut hasher = ::std::collections::hash_map::DefaultHasher::new();
::std::hash::Hash::hash(x, &mut hasher);
hasher.finish()
}
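
On the consumer side, Db::make_query and Db::trace_query replace the old Db::get: callers receive a QueryCtx and pull whatever they need through it. A minimal sketch, assuming a Db value and visibility of the PARENT_MODULE query from module_map_db; the two helper function names are invented for illustration.

fn parent_modules(db: &Db, file_id: FileId) -> Arc<Vec<FileId>> {
    // make_query hands the closure a QueryCtx tied to the current ground data.
    db.make_query(|ctx| ctx.get(PARENT_MODULE, file_id))
}

fn traced_parent_modules(db: &Db, file_id: FileId) -> (Arc<Vec<FileId>>, Vec<&'static str>) {
    // trace_query also returns the names of the queries that actually ran,
    // which is what the tests below assert against.
    db.trace_query(|ctx| ctx.get(PARENT_MODULE, file_id))
}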


@@ -1,43 +0,0 @@
use std::sync::Arc;
use libsyntax2::{File};
use {
    FileId, FileResolverImp,
    db::{Query, GroundQuery, QueryCtx, hash},
};

impl<'a> QueryCtx<'a> {
    pub(crate) fn file_set(&self) -> Arc<(Vec<FileId>, FileResolverImp)> {
        self.get_g(FILE_SET, ())
    }
    pub(crate) fn file_text(&self, file_id: FileId) -> Arc<str> {
        Arc::clone(&*self.get_g(FILE_TEXT, file_id))
    }
    pub(crate) fn file_syntax(&self, file_id: FileId) -> File {
        (&*self.get(FILE_SYNTAX, file_id)).clone()
    }
}

pub(super) const FILE_TEXT: GroundQuery<FileId, Arc<str>> = GroundQuery {
    id: 10,
    f: |state, id| state.file_map[&id].clone(),
    h: hash,
};

pub(super) const FILE_SET: GroundQuery<(), (Vec<FileId>, FileResolverImp)> = GroundQuery {
    id: 11,
    f: |state, &()| {
        let files = state.file_map.keys().cloned().collect();
        let resolver = state.resolver.clone();
        (files, resolver)
    },
    h: |(files, _)| hash(files),
};

pub(super) const FILE_SYNTAX: Query<FileId, File> = Query {
    id: 20,
    f: |ctx, file_id: &FileId| {
        let text = ctx.file_text(*file_id);
        File::parse(&*text)
    }
};


@@ -4,15 +4,16 @@ use std::sync::Arc;
use {
FileId,
db::{
BoxedQuery, Query, QueryCtx
Query, QueryRegistry, QueryCtx,
file_syntax, file_set
},
module_map::resolve_submodule,
};
pub(crate) fn queries(acc: &mut Vec<BoxedQuery>) {
acc.push(MODULE_DESCR.into());
acc.push(RESOLVE_SUBMODULE.into());
acc.push(PARENT_MODULE.into());
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
reg.add(MODULE_DESCR, "MODULE_DESCR");
reg.add(RESOLVE_SUBMODULE, "RESOLVE_SUBMODULE");
reg.add(PARENT_MODULE, "PARENT_MODULE");
}
impl<'a> QueryCtx<'a> {
@@ -24,41 +25,32 @@ impl<'a> QueryCtx<'a> {
}
}
pub(crate) const MODULE_DESCR: Query<FileId, descr::ModuleDescr> = Query {
id: 30,
f: |ctx, &file_id| {
let file = ctx.file_syntax(file_id);
descr::ModuleDescr::new(file.ast())
}
};
const MODULE_DESCR: Query<FileId, descr::ModuleDescr> = Query(30, |ctx, &file_id| {
let file = file_syntax(ctx, file_id);
descr::ModuleDescr::new(file.ast())
});
pub(crate) const RESOLVE_SUBMODULE: Query<(FileId, descr::Submodule), Vec<FileId>> = Query {
id: 31,
f: |ctx, params| {
let files = ctx.file_set();
resolve_submodule(params.0, &params.1.name, &files.1).0
}
};
const RESOLVE_SUBMODULE: Query<(FileId, descr::Submodule), Vec<FileId>> = Query(31, |ctx, params| {
let files = file_set(ctx);
resolve_submodule(params.0, &params.1.name, &files.1).0
});
pub(crate) const PARENT_MODULE: Query<FileId, Vec<FileId>> = Query {
id: 40,
f: |ctx, file_id| {
let files = ctx.file_set();
let res = files.0.iter()
.map(|&parent_id| (parent_id, ctx.module_descr(parent_id)))
.filter(|(parent_id, descr)| {
descr.submodules.iter()
.any(|subm| {
ctx.resolve_submodule(*parent_id, subm.clone())
.iter()
.any(|it| it == file_id)
})
})
.map(|(id, _)| id)
.collect();
res
}
};
const PARENT_MODULE: Query<FileId, Vec<FileId>> = Query(40, |ctx, file_id| {
let files = file_set(ctx);
let res = files.0.iter()
.map(|&parent_id| (parent_id, ctx.module_descr(parent_id)))
.filter(|(parent_id, descr)| {
descr.submodules.iter()
.any(|subm| {
ctx.resolve_submodule(*parent_id, subm.clone())
.iter()
.any(|it| it == file_id)
})
})
.map(|(id, _)| id)
.collect();
res
});
#[cfg(test)]
mod tests {
@@ -107,34 +99,36 @@ mod tests {
self.next_file_id += 1;
self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
let mut new_state = self.db.state().clone();
new_state.file_map.insert(file_id, text.to_string().into_boxed_str().into());
new_state.resolver = FileResolverImp::new(
new_state.file_map.insert(file_id, Arc::new(text.to_string()));
new_state.file_resolver = FileResolverImp::new(
Arc::new(FileMap(self.fm.clone()))
);
self.db = self.db.with_state(new_state, &[file_id], true);
self.db = self.db.with_changes(new_state, &[file_id], true);
file_id
}
fn remove_file(&mut self, file_id: FileId) {
self.fm.remove(&file_id);
let mut new_state = self.db.state().clone();
new_state.file_map.remove(&file_id);
new_state.resolver = FileResolverImp::new(
new_state.file_resolver = FileResolverImp::new(
Arc::new(FileMap(self.fm.clone()))
);
self.db = self.db.with_state(new_state, &[file_id], true);
self.db = self.db.with_changes(new_state, &[file_id], true);
}
fn change_file(&mut self, file_id: FileId, new_text: &str) {
let mut new_state = self.db.state().clone();
new_state.file_map.insert(file_id, new_text.to_string().into_boxed_str().into());
self.db = self.db.with_state(new_state, &[file_id], false);
new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
self.db = self.db.with_changes(new_state, &[file_id], false);
}
fn check_parent_modules(
&self,
file_id: FileId,
expected: &[FileId],
queries: &[(u16, u64)]
queries: &[(&'static str, u64)]
) {
let (actual, events) = self.db.get(PARENT_MODULE, file_id);
let (actual, events) = self.db.trace_query(|ctx| {
ctx.get(PARENT_MODULE, file_id)
});
assert_eq!(actual.as_slice(), expected);
let mut counts = HashMap::new();
events.into_iter()
@@ -156,25 +150,25 @@ mod tests {
fn test_parent_module() {
let mut f = Fixture::new();
let foo = f.add_file("/foo.rs", "");
f.check_parent_modules(foo, &[], &[(MODULE_DESCR.id, 1)]);
f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
let lib = f.add_file("/lib.rs", "mod foo;");
f.check_parent_modules(foo, &[lib], &[(MODULE_DESCR.id, 1)]);
f.check_parent_modules(foo, &[lib], &[(MODULE_DESCR.id, 0)]);
f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
f.change_file(lib, "");
f.check_parent_modules(foo, &[], &[(MODULE_DESCR.id, 1)]);
f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
f.change_file(lib, "mod foo;");
f.check_parent_modules(foo, &[lib], &[(MODULE_DESCR.id, 1)]);
f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
f.change_file(lib, "mod bar;");
f.check_parent_modules(foo, &[], &[(MODULE_DESCR.id, 1)]);
f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
f.change_file(lib, "mod foo;");
f.check_parent_modules(foo, &[lib], &[(MODULE_DESCR.id, 1)]);
f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
f.remove_file(lib);
f.check_parent_modules(foo, &[], &[(MODULE_DESCR.id, 0)]);
f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
}
}


@@ -8,8 +8,8 @@ use std::{
};
use parking_lot::Mutex;
type GroundQueryFn<T, D> = Box<Fn(&T, &D) -> (D, OutputFingerprint) + Send + Sync + 'static>;
type QueryFn<T, D> = Box<Fn(&QueryCtx<T, D>, &D) -> (D, OutputFingerprint) + Send + Sync + 'static>;
pub type GroundQueryFn<T, D> = Box<Fn(&T, &D) -> (D, OutputFingerprint) + Send + Sync + 'static>;
pub type QueryFn<T, D> = Box<Fn(&QueryCtx<T, D>, &D) -> (D, OutputFingerprint) + Send + Sync + 'static>;
#[derive(Debug)]
pub struct Db<T, D> {
@@ -118,6 +118,9 @@ where
self.record_dep(query_id, output_fingerprint);
res
}
pub fn trace(&self) -> Vec<QueryTypeId> {
::std::mem::replace(&mut *self.executed.borrow_mut(), Vec::new())
}
fn get_inner(
&self,
@@ -261,12 +264,15 @@ where
query_config: Arc::clone(&self.query_config)
}
}
pub fn query_ctx(&self) -> QueryCtx<T, D> {
QueryCtx::new(self)
}
pub fn get(
&self,
query_id: QueryId,
params: D,
) -> (D, Vec<QueryTypeId>) {
let ctx = QueryCtx::new(self);
let ctx = self.query_ctx();
let res = ctx.get(query_id, params.into());
let executed = ::std::mem::replace(&mut *ctx.executed.borrow_mut(), Vec::new());
(res, executed)
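
At the salsa layer, the change is the pair of newly public entry points used above: Db::query_ctx hands out the context that Db::get used to build internally, and QueryCtx::trace drains the list of query types executed so far. A minimal sketch of how libanalysis drives them, using the State and Data aliases from db::imp; run_traced is a hypothetical helper, not part of the commit.

fn run_traced(db: &salsa::Db<State, Data>, query_id: salsa::QueryId, params: Data)
    -> (Data, Vec<salsa::QueryTypeId>)
{
    let ctx = db.query_ctx();             // same ctx that Db::get builds internally
    let res = ctx.get(query_id, params);  // compute the query or reuse a cached value
    let trace = ctx.trace();              // take the log of executed query types
    (res, trace)
}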