2016-03-28 17:42:39 -04:00
|
|
|
//! Code to save/load the dep-graph from files.
|
|
|
|
|
|
2020-03-29 17:19:48 +02:00
|
|
|
use rustc_data_structures::fx::FxHashMap;
|
2020-03-29 16:41:09 +02:00
|
|
|
use rustc_middle::dep_graph::{PreviousDepGraph, SerializedDepGraph, WorkProduct, WorkProductId};
|
|
|
|
|
use rustc_middle::ty::query::OnDiskCache;
|
|
|
|
|
use rustc_middle::ty::TyCtxt;
|
2016-09-01 16:55:33 +03:00
|
|
|
use rustc_serialize::opaque::Decoder;
|
2019-12-22 17:42:04 -05:00
|
|
|
use rustc_serialize::Decodable as RustcDecodable;
|
2020-03-11 12:49:08 +01:00
|
|
|
use rustc_session::Session;
|
2017-09-28 15:26:11 +02:00
|
|
|
use std::path::Path;
|
2016-03-28 17:42:39 -04:00
|
|
|
|
|
|
|
|
use super::data::*;
|
2016-09-26 16:05:01 -04:00
|
|
|
use super::file_format;
|
2019-12-22 17:42:04 -05:00
|
|
|
use super::fs::*;
|
2017-01-16 17:54:20 -05:00
|
|
|
use super::work_product;
|
2016-03-28 17:42:39 -04:00
|
|
|
|
2019-06-21 23:49:03 +02:00
|
|
|
pub fn dep_graph_tcx_init(tcx: TyCtxt<'_>) {
|
2017-09-28 15:26:11 +02:00
|
|
|
if !tcx.dep_graph.is_fully_enabled() {
|
2019-12-22 17:42:04 -05:00
|
|
|
return;
|
2016-07-21 12:44:59 -04:00
|
|
|
}
|
2016-03-28 17:42:39 -04:00
|
|
|
|
2017-09-28 15:26:11 +02:00
|
|
|
tcx.allocate_metadata_dep_nodes();
|
2016-03-28 17:42:39 -04:00
|
|
|
}
|
|
|
|
|
|
2018-05-07 22:30:44 -04:00
|
|
|
/// Maps each work-product ID from the previous session to its description.
type WorkProductMap = FxHashMap<WorkProductId, WorkProduct>;
|
|
|
|
|
|
2017-12-07 16:05:29 +01:00
|
|
|
/// Outcome of trying to load incremental-compilation data from disk.
pub enum LoadResult<T> {
    /// The data was read and decoded successfully.
    Ok { data: T },
    /// The on-disk data cannot be reused (e.g. missing file, incompatible
    /// compiler version, or differing compilation options) and must be
    /// discarded.
    DataOutOfDate,
    /// Reading the data failed outright; `message` describes the error.
    Error { message: String },
}
|
|
|
|
|
|
2018-05-07 22:30:44 -04:00
|
|
|
impl LoadResult<(PreviousDepGraph, WorkProductMap)> {
|
|
|
|
|
pub fn open(self, sess: &Session) -> (PreviousDepGraph, WorkProductMap) {
|
2017-12-07 16:05:29 +01:00
|
|
|
match self {
|
|
|
|
|
LoadResult::Error { message } => {
|
2018-03-22 22:25:57 -04:00
|
|
|
sess.warn(&message);
|
2018-10-16 16:57:53 +02:00
|
|
|
Default::default()
|
2019-12-22 17:42:04 -05:00
|
|
|
}
|
2017-12-07 16:05:29 +01:00
|
|
|
LoadResult::DataOutOfDate => {
|
|
|
|
|
if let Err(err) = delete_all_session_dir_contents(sess) {
|
2019-12-22 17:42:04 -05:00
|
|
|
sess.err(&format!(
|
|
|
|
|
"Failed to delete invalidated or incompatible \
|
2017-12-07 16:05:29 +01:00
|
|
|
incremental compilation session directory contents `{}`: {}.",
|
2019-12-22 17:42:04 -05:00
|
|
|
dep_graph_path(sess).display(),
|
|
|
|
|
err
|
|
|
|
|
));
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2018-10-16 16:57:53 +02:00
|
|
|
Default::default()
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2019-12-22 17:42:04 -05:00
|
|
|
LoadResult::Ok { data } => data,
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-10-10 14:27:52 -04:00
|
|
|
fn load_data(
|
|
|
|
|
report_incremental_info: bool,
|
|
|
|
|
path: &Path,
|
|
|
|
|
nightly_build: bool,
|
|
|
|
|
) -> LoadResult<(Vec<u8>, usize)> {
|
|
|
|
|
match file_format::read_file(report_incremental_info, path, nightly_build) {
|
2019-12-22 17:42:04 -05:00
|
|
|
Ok(Some(data_and_pos)) => LoadResult::Ok { data: data_and_pos },
|
2016-09-26 16:05:01 -04:00
|
|
|
Ok(None) => {
|
|
|
|
|
// The file either didn't exist or was produced by an incompatible
|
|
|
|
|
// compiler version. Neither is an error.
|
2017-12-07 16:05:29 +01:00
|
|
|
LoadResult::DataOutOfDate
|
2016-07-21 12:44:59 -04:00
|
|
|
}
|
2019-12-22 17:42:04 -05:00
|
|
|
Err(err) => LoadResult::Error {
|
|
|
|
|
message: format!("could not load dep-graph from `{}`: {}", path.display(), err),
|
|
|
|
|
},
|
2016-03-28 17:42:39 -04:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-22 17:42:04 -05:00
|
|
|
/// Deletes the on-disk files of a work product whose inputs changed, so a
/// later session does not pick up stale artifacts.
fn delete_dirty_work_product(sess: &Session, swp: SerializedWorkProduct) {
    debug!("delete_dirty_work_product({:?})", swp);
    work_product::delete_workproduct_files(sess, &swp.work_product);
}
|
2016-08-30 16:49:54 -04:00
|
|
|
|
2017-12-07 16:05:29 +01:00
|
|
|
/// Either a result that has already been computed or a
/// handle that will let us wait until it is computed
/// by a background thread.
pub enum MaybeAsync<T> {
    /// The value is already available.
    Sync(T),
    /// The value is still being computed on the given background thread.
    Async(std::thread::JoinHandle<T>),
}
|
|
|
|
|
impl<T> MaybeAsync<T> {
|
|
|
|
|
pub fn open(self) -> std::thread::Result<T> {
|
|
|
|
|
match self {
|
|
|
|
|
MaybeAsync::Sync(result) => Ok(result),
|
2019-12-22 17:42:04 -05:00
|
|
|
MaybeAsync::Async(handle) => handle.join(),
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2017-09-22 13:00:42 +02:00
|
|
|
}
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2017-09-22 13:00:42 +02:00
|
|
|
|
2018-12-08 20:30:23 +01:00
|
|
|
/// Handle through which the dep-graph — loaded on a background thread — is
/// eventually retrieved.
pub type DepGraphFuture = MaybeAsync<LoadResult<(PreviousDepGraph, WorkProductMap)>>;
|
|
|
|
|
|
2017-12-07 16:05:29 +01:00
|
|
|
/// Launch a thread and load the dependency graph in the background.
|
2018-12-08 20:30:23 +01:00
|
|
|
pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
|
2017-12-07 16:05:29 +01:00
|
|
|
// Since `sess` isn't `Sync`, we perform all accesses to `sess`
|
|
|
|
|
// before we fire the background thread.
|
2017-09-22 13:00:42 +02:00
|
|
|
|
2019-10-08 14:05:41 +02:00
|
|
|
let prof = sess.prof.clone();
|
2017-12-03 14:21:23 +01:00
|
|
|
|
2017-12-07 16:05:29 +01:00
|
|
|
if sess.opts.incremental.is_none() {
|
|
|
|
|
// No incremental compilation.
|
2019-12-22 17:42:04 -05:00
|
|
|
return MaybeAsync::Sync(LoadResult::Ok { data: Default::default() });
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2017-11-13 15:13:44 +01:00
|
|
|
|
2020-01-09 03:48:00 +01:00
|
|
|
let _timer = sess.prof.generic_activity("incr_comp_prepare_load_dep_graph");
|
|
|
|
|
|
2017-12-07 16:05:29 +01:00
|
|
|
// Calling `sess.incr_comp_session_dir()` will panic if `sess.opts.incremental.is_none()`.
|
|
|
|
|
// Fortunately, we just checked that this isn't the case.
|
|
|
|
|
let path = dep_graph_path_from(&sess.incr_comp_session_dir());
|
|
|
|
|
let report_incremental_info = sess.opts.debugging_opts.incremental_info;
|
|
|
|
|
let expected_hash = sess.opts.dep_tracking_hash();
|
|
|
|
|
|
2018-10-16 10:44:26 +02:00
|
|
|
let mut prev_work_products = FxHashMap::default();
|
2020-10-10 14:27:52 -04:00
|
|
|
let nightly_build = sess.is_nightly_build();
|
2018-05-07 22:30:44 -04:00
|
|
|
|
|
|
|
|
// If we are only building with -Zquery-dep-graph but without an actual
|
2018-05-08 09:13:18 -04:00
|
|
|
// incr. comp. session directory, we skip this. Otherwise we'd fail
|
2018-05-07 22:30:44 -04:00
|
|
|
// when trying to load work products.
|
|
|
|
|
if sess.incr_comp_session_dir_opt().is_some() {
|
|
|
|
|
let work_products_path = work_products_path(sess);
|
2020-10-10 14:27:52 -04:00
|
|
|
let load_result = load_data(report_incremental_info, &work_products_path, nightly_build);
|
2018-05-07 22:30:44 -04:00
|
|
|
|
|
|
|
|
if let LoadResult::Ok { data: (work_products_data, start_pos) } = load_result {
|
|
|
|
|
// Decode the list of work_products
|
|
|
|
|
let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos);
|
|
|
|
|
let work_products: Vec<SerializedWorkProduct> =
|
|
|
|
|
RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| {
|
2019-12-22 17:42:04 -05:00
|
|
|
let msg = format!(
|
|
|
|
|
"Error decoding `work-products` from incremental \
|
|
|
|
|
compilation session directory: {}",
|
|
|
|
|
e
|
|
|
|
|
);
|
2018-05-07 22:30:44 -04:00
|
|
|
sess.fatal(&msg[..])
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
for swp in work_products {
|
|
|
|
|
let mut all_files_exist = true;
|
2020-05-12 15:56:02 +10:00
|
|
|
if let Some(ref file_name) = swp.work_product.saved_file {
|
2018-05-07 22:30:44 -04:00
|
|
|
let path = in_incr_comp_dir_sess(sess, file_name);
|
|
|
|
|
if !path.exists() {
|
|
|
|
|
all_files_exist = false;
|
|
|
|
|
|
|
|
|
|
if sess.opts.debugging_opts.incremental_info {
|
2019-12-22 17:42:04 -05:00
|
|
|
eprintln!(
|
|
|
|
|
"incremental: could not find file for work \
|
|
|
|
|
product: {}",
|
|
|
|
|
path.display()
|
|
|
|
|
);
|
2018-05-07 22:30:44 -04:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if all_files_exist {
|
|
|
|
|
debug!("reconcile_work_products: all files for {:?} exist", swp);
|
|
|
|
|
prev_work_products.insert(swp.id, swp.work_product);
|
|
|
|
|
} else {
|
|
|
|
|
debug!("reconcile_work_products: some file for {:?} does not exist", swp);
|
|
|
|
|
delete_dirty_work_product(sess, swp);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2017-12-07 16:05:29 +01:00
|
|
|
MaybeAsync::Async(std::thread::spawn(move || {
|
2020-01-07 21:34:08 +01:00
|
|
|
let _prof_timer = prof.generic_activity("incr_comp_load_dep_graph");
|
2020-01-01 02:24:05 +01:00
|
|
|
|
2020-10-10 14:27:52 -04:00
|
|
|
match load_data(report_incremental_info, &path, nightly_build) {
|
2020-01-01 02:24:05 +01:00
|
|
|
LoadResult::DataOutOfDate => LoadResult::DataOutOfDate,
|
|
|
|
|
LoadResult::Error { message } => LoadResult::Error { message },
|
|
|
|
|
LoadResult::Ok { data: (bytes, start_pos) } => {
|
|
|
|
|
let mut decoder = Decoder::new(&bytes, start_pos);
|
|
|
|
|
let prev_commandline_args_hash = u64::decode(&mut decoder)
|
|
|
|
|
.expect("Error reading commandline arg hash from cached dep-graph");
|
|
|
|
|
|
|
|
|
|
if prev_commandline_args_hash != expected_hash {
|
|
|
|
|
if report_incremental_info {
|
|
|
|
|
println!(
|
|
|
|
|
"[incremental] completely ignoring cache because of \
|
2019-12-22 17:42:04 -05:00
|
|
|
differing commandline arguments"
|
2020-01-01 02:24:05 +01:00
|
|
|
);
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2020-01-01 02:24:05 +01:00
|
|
|
// We can't reuse the cache, purge it.
|
|
|
|
|
debug!("load_dep_graph_new: differing commandline arg hashes");
|
2017-09-22 13:00:42 +02:00
|
|
|
|
2020-01-01 02:24:05 +01:00
|
|
|
// No need to do any further work
|
|
|
|
|
return LoadResult::DataOutOfDate;
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2020-01-01 02:24:05 +01:00
|
|
|
|
|
|
|
|
let dep_graph = SerializedDepGraph::decode(&mut decoder)
|
|
|
|
|
.expect("Error reading cached dep-graph");
|
|
|
|
|
|
|
|
|
|
LoadResult::Ok { data: (PreviousDepGraph::new(dep_graph), prev_work_products) }
|
2017-12-07 16:05:29 +01:00
|
|
|
}
|
2020-01-01 02:24:05 +01:00
|
|
|
}
|
2017-12-07 16:05:29 +01:00
|
|
|
}))
|
2017-09-22 13:00:42 +02:00
|
|
|
}
|
2017-10-19 14:32:39 +02:00
|
|
|
|
2019-06-21 23:49:03 +02:00
|
|
|
pub fn load_query_result_cache(sess: &Session) -> OnDiskCache<'_> {
|
2020-03-23 11:41:35 +11:00
|
|
|
if sess.opts.incremental.is_none() {
|
2018-08-18 12:14:09 +02:00
|
|
|
return OnDiskCache::new_empty(sess.source_map());
|
2017-10-19 14:32:39 +02:00
|
|
|
}
|
|
|
|
|
|
2019-10-08 14:05:41 +02:00
|
|
|
let _prof_timer = sess.prof.generic_activity("incr_comp_load_query_result_cache");
|
|
|
|
|
|
2020-10-10 14:27:52 -04:00
|
|
|
match load_data(
|
|
|
|
|
sess.opts.debugging_opts.incremental_info,
|
|
|
|
|
&query_cache_path(sess),
|
|
|
|
|
sess.is_nightly_build(),
|
|
|
|
|
) {
|
2019-12-22 17:42:04 -05:00
|
|
|
LoadResult::Ok { data: (bytes, start_pos) } => OnDiskCache::new(sess, bytes, start_pos),
|
|
|
|
|
_ => OnDiskCache::new_empty(sess.source_map()),
|
2017-10-19 14:32:39 +02:00
|
|
|
}
|
|
|
|
|
}
|