Drop dead code

Aleksey Kladov
2018-10-15 21:56:01 +03:00
parent 7503c5528f
commit ee69fddf02
4 changed files with 0 additions and 401 deletions

@@ -1,153 +0,0 @@
use std::{
    sync::Arc,
    any::Any,
    hash::{Hash, Hasher},
    collections::hash_map::DefaultHasher,
    iter,
};

use rustc_hash::FxHashMap;
use salsa;

use crate::{FileId, imp::FileResolverImp};
use super::{State, Query, QueryCtx};

pub(super) type Data = Arc<Any + Send + Sync + 'static>;

#[derive(Debug)]
pub(super) struct Db {
    names: Arc<FxHashMap<salsa::QueryTypeId, &'static str>>,
    pub(super) imp: salsa::Db<State, Data>,
}

impl Db {
    pub(super) fn new(mut reg: QueryRegistry) -> Db {
        let config = reg.config.take().unwrap();
        Db {
            names: Arc::new(reg.names),
            imp: salsa::Db::new(config, State::default()),
        }
    }
    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
        let names = self.names.clone();
        let mut invalidations = salsa::Invalidations::new();
        invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
        if resolver_changed {
            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
        } else {
            invalidations.invalidate(FILE_SET, iter::empty());
        }
        let imp = self.imp.with_ground_data(
            new_state,
            invalidations,
        );
        Db { names, imp }
    }
    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
    }
}
pub(crate) trait EvalQuery {
    type Params;
    type Output;
    fn query_type(&self) -> salsa::QueryTypeId;
    fn f(&self) -> salsa::QueryFn<State, Data>;
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output>;
}

impl<T, R> EvalQuery for Query<T, R>
where
    T: Hash + Send + Sync + 'static,
    R: Hash + Send + Sync + 'static,
{
    type Params = T;
    type Output = R;
    fn query_type(&self) -> salsa::QueryTypeId {
        salsa::QueryTypeId(self.0)
    }
    fn f(&self) -> salsa::QueryFn<State, Data> {
        let f = self.1;
        Box::new(move |ctx, data| {
            let ctx = QueryCtx { imp: ctx };
            let data: &T = data.downcast_ref().unwrap();
            let res = f(ctx, data);
            let h = hash(&res);
            (Arc::new(res), salsa::OutputFingerprint(h))
        })
    }
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
        let query_id = salsa::QueryId(
            self.query_type(),
            salsa::InputFingerprint(hash(&params)),
        );
        let res = ctx.imp.get(query_id, Arc::new(params));
        res.downcast().unwrap()
    }
}
pub(super) struct QueryRegistry {
    config: Option<salsa::QueryConfig<State, Data>>,
    names: FxHashMap<salsa::QueryTypeId, &'static str>,
}

impl QueryRegistry {
    pub(super) fn new() -> QueryRegistry {
        let mut config = salsa::QueryConfig::<State, Data>::new();
        config = config.with_ground_query(
            FILE_TEXT,
            Box::new(|state, params| {
                let file_id: &FileId = params.downcast_ref().unwrap();
                let res = state.file_map[file_id].clone();
                let fingerprint = salsa::OutputFingerprint(hash(&res));
                (res, fingerprint)
            }),
        );
        config = config.with_ground_query(
            FILE_SET,
            Box::new(|state, _params| {
                let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
                let hash = hash(&file_ids);
                let file_resolver = state.file_resolver.clone();
                let res = (file_ids, file_resolver);
                let fingerprint = salsa::OutputFingerprint(hash);
                (Arc::new(res), fingerprint)
            }),
        );
        let mut names = FxHashMap::default();
        names.insert(FILE_TEXT, "FILE_TEXT");
        names.insert(FILE_SET, "FILE_SET");
        QueryRegistry { config: Some(config), names }
    }
    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
        let id = q.query_type();
        let prev = self.names.insert(id, name);
        assert!(prev.is_none(), "duplicate query: {:?}", id);
        let config = self.config.take().unwrap();
        let config = config.with_query(id, q.f());
        self.config = Some(config);
    }
}
fn hash<T: Hash>(x: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    x.hash(&mut hasher);
    hasher.finish()
}

const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);

pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
    let query_id = salsa::QueryId(
        FILE_TEXT,
        salsa::InputFingerprint(hash(&file_id)),
    );
    let res = ctx.imp.get(query_id, Arc::new(file_id));
    res.downcast().unwrap()
}

const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);

pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
    let query_id = salsa::QueryId(
        FILE_SET,
        salsa::InputFingerprint(hash(&())),
    );
    let res = ctx.imp.get(query_id, Arc::new(()));
    res.downcast().unwrap()
}
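
For reference, a derived query on the removed machinery above is a `Query(type_id, compute_fn)` pair registered under a debug name with `QueryRegistry::add`; `EvalQuery::f` then wraps the function to downcast the type-erased params and fingerprint the result with `hash`. A minimal sketch of that wiring, assuming a hypothetical `LINE_COUNT` query (the id `42`, `line_count`, and `register_queries` are invented here for illustration, not part of the removed file):

// Hypothetical derived query: a hand-picked u16 type id paired with its compute fn.
const LINE_COUNT: Query<FileId, usize> = Query(42, line_count);

fn line_count(ctx: QueryCtx, file_id: &FileId) -> usize {
    // Reading file_text through the same ctx lets the engine record the
    // dependency, so edits to this file invalidate LINE_COUNT as well.
    file_text(ctx, *file_id).lines().count()
}

fn register_queries(reg: &mut QueryRegistry) {
    // The string is only used by Db::extract_trace when tracing query evaluation.
    reg.add(LINE_COUNT, "LINE_COUNT");
}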

@@ -1,189 +0,0 @@
use std::{
    fmt,
    sync::Arc,
    hash::{Hash, Hasher},
};

use salsa;
use rustc_hash::FxHashSet;
use ra_syntax::File;
use ra_editor::LineIndex;

use crate::{
    symbol_index::SymbolIndex,
    module_map::{ModulesDatabase, ModuleTreeQuery, ModuleDescriptorQuery},
    FileId, FileResolverImp,
};

#[derive(Default)]
pub(crate) struct RootDatabase {
    runtime: salsa::runtime::Runtime<RootDatabase>,
}

impl fmt::Debug for RootDatabase {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("RootDatabase { ... }")
    }
}

impl salsa::Database for RootDatabase {
    fn salsa_runtime(&self) -> &salsa::runtime::Runtime<RootDatabase> {
        &self.runtime
    }
}

salsa::database_storage! {
    pub(crate) struct RootDatabaseStorage for RootDatabase {
        impl FilesDatabase {
            fn file_text() for FileTextQuery;
            fn file_set() for FileSetQuery;
        }
        impl SyntaxDatabase {
            fn file_syntax() for FileSyntaxQuery;
            fn file_lines() for FileLinesQuery;
            fn file_symbols() for FileSymbolsQuery;
        }
        impl ModulesDatabase {
            fn module_tree() for ModuleTreeQuery;
            fn module_descriptor() for ModuleDescriptorQuery;
        }
    }
}
salsa::query_group! {
    pub(crate) trait FilesDatabase: salsa::Database {
        fn file_text(file_id: FileId) -> Arc<String> {
            type FileTextQuery;
            storage input;
        }
        fn file_set(key: ()) -> Arc<FileSet> {
            type FileSetQuery;
            storage input;
        }
    }
}

#[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct FileSet {
    pub(crate) files: FxHashSet<FileId>,
    pub(crate) resolver: FileResolverImp,
}

impl Hash for FileSet {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        let mut files = self.files.iter().cloned().collect::<Vec<_>>();
        files.sort();
        files.hash(hasher);
    }
}

salsa::query_group! {
    pub(crate) trait SyntaxDatabase: FilesDatabase {
        fn file_syntax(file_id: FileId) -> File {
            type FileSyntaxQuery;
        }
        fn file_lines(file_id: FileId) -> Arc<LineIndex> {
            type FileLinesQuery;
        }
        fn file_symbols(file_id: FileId) -> Arc<SymbolIndex> {
            type FileSymbolsQuery;
        }
    }
}

fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> File {
    let text = db.file_text(file_id);
    File::parse(&*text)
}

fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
    let text = db.file_text(file_id);
    Arc::new(LineIndex::new(&*text))
}

fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<SymbolIndex> {
    let syntax = db.file_syntax(file_id);
    Arc::new(SymbolIndex::for_file(file_id, syntax))
}
// mod imp;
//
// use std::{
//     sync::Arc,
// };
// use im;
// use salsa;
// use {FileId, imp::FileResolverImp};
//
// #[derive(Debug, Default, Clone)]
// pub(crate) struct State {
//     pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
//     pub(crate) file_resolver: FileResolverImp
// }
//
// #[derive(Debug)]
// pub(crate) struct Db {
//     imp: imp::Db,
// }
//
// #[derive(Clone, Copy)]
// pub(crate) struct QueryCtx<'a> {
//     imp: &'a salsa::QueryCtx<State, imp::Data>,
// }
//
// pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
//
// pub(crate) struct QueryRegistry {
//     imp: imp::QueryRegistry,
// }
//
// impl Default for Db {
//     fn default() -> Db {
//         Db::new()
//     }
// }
//
// impl Db {
//     pub(crate) fn new() -> Db {
//         let reg = QueryRegistry::new();
//         Db { imp: imp::Db::new(reg.imp) }
//     }
//     pub(crate) fn state(&self) -> &State {
//         self.imp.imp.ground_data()
//     }
//     pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
//         Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
//     }
//     pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
//         f(ctx)
//     }
//     #[allow(unused)]
//     pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
//         let res = f(ctx);
//         let trace = self.imp.extract_trace(ctx.imp);
//         (res, trace)
//     }
// }
//
// impl<'a> QueryCtx<'a> {
//     pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
//         q.get(self, params)
//     }
// }
//
// pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
//     imp::file_text(ctx, file_id)
// }
//
// pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
//     imp::file_set(ctx)
// }
//
// impl QueryRegistry {
//     fn new() -> QueryRegistry {
//         let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
//         ::queries::register_queries(&mut reg);
//         ::module_map::register_queries(&mut reg);
//         reg
//     }
//     pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
//         self.imp.add(q, name)
//     }
// }
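
In contrast to the hand-rolled registry in the first file, adding a computation on the salsa-based database above only takes a free function over the query-group traits; the dependency on `file_text` is tracked by salsa itself. A minimal sketch, not part of the removed file (`file_line_count` is a hypothetical name used only for illustration):

// Hypothetical derived computation in the same style as file_lines above: it
// reads other queries only through the database trait, so salsa records the
// dependency on file_text automatically.
fn file_line_count(db: &impl SyntaxDatabase, file_id: FileId) -> usize {
    let text = db.file_text(file_id);
    text.lines().count()
}

Caching it as a salsa query would additionally need a `FileLineCountQuery` entry in the `SyntaxDatabase` group and in `RootDatabaseStorage`, following the `file_lines` pattern.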