Move DepKind to rustc_query_system and define it as u16

John Kåre Alsaker
2023-09-15 15:39:11 +02:00
parent 66ab7e6883
commit 1806efe7f2
24 changed files with 509 additions and 519 deletions

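Throughout the diff below, the per-crate generic parameter K: DepKind is replaced by a concrete DepKind type backed by a u16, together with a Deps trait that carries the remaining crate-specific pieces. For orientation, here is a minimal sketch of what such a u16-backed DepKind could look like; the accessor names (new, as_inner, as_usize) are taken from their uses in the hunks below, while the field name is illustrative and the real definition lives in rustc_query_system rather than in this file:

    // Sketch only, not copied from the commit.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    pub struct DepKind {
        // Raw u16 discriminant; not all bits are used (see the header packing below).
        variant: u16,
    }

    impl DepKind {
        #[inline]
        pub const fn new(variant: u16) -> Self {
            Self { variant }
        }

        /// The raw u16, used when packing the serialized node header.
        #[inline]
        pub const fn as_inner(self) -> u16 {
            self.variant
        }

        /// The discriminant widened to usize, used to index per-kind tables.
        #[inline]
        pub const fn as_usize(self) -> usize {
            self.variant as usize
        }
    }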

@@ -36,7 +36,7 @@
//! store it directly after the header with leb128.
use super::query::DepGraphQuery;
-use super::{DepKind, DepNode, DepNodeIndex};
+use super::{DepKind, DepNode, DepNodeIndex, Deps};
use crate::dep_graph::EdgesVec;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fingerprint::PackedFingerprint;
@@ -70,9 +70,9 @@ const DEP_NODE_WIDTH_BITS: usize = DEP_NODE_SIZE / 2;
/// Data for use when recompiling the **current crate**.
#[derive(Debug)]
-pub struct SerializedDepGraph<K: DepKind> {
+pub struct SerializedDepGraph {
/// The set of all DepNodes in the graph
-nodes: IndexVec<SerializedDepNodeIndex, DepNode<K>>,
+nodes: IndexVec<SerializedDepNodeIndex, DepNode>,
/// The set of all Fingerprints in the graph. Each Fingerprint corresponds to
/// the DepNode at the same index in the nodes vector.
fingerprints: IndexVec<SerializedDepNodeIndex, Fingerprint>,
@@ -88,7 +88,7 @@ pub struct SerializedDepGraph<K: DepKind> {
index: Vec<UnhashMap<PackedFingerprint, SerializedDepNodeIndex>>,
}
-impl<K: DepKind> Default for SerializedDepGraph<K> {
+impl Default for SerializedDepGraph {
fn default() -> Self {
SerializedDepGraph {
nodes: Default::default(),
@@ -100,7 +100,7 @@ impl<K: DepKind> Default for SerializedDepGraph<K> {
}
}
-impl<K: DepKind> SerializedDepGraph<K> {
+impl SerializedDepGraph {
#[inline]
pub fn edge_targets_from(
&self,
@@ -134,13 +134,13 @@ impl<K: DepKind> SerializedDepGraph<K> {
}
#[inline]
-pub fn index_to_node(&self, dep_node_index: SerializedDepNodeIndex) -> DepNode<K> {
+pub fn index_to_node(&self, dep_node_index: SerializedDepNodeIndex) -> DepNode {
self.nodes[dep_node_index]
}
#[inline]
-pub fn node_to_index_opt(&self, dep_node: &DepNode<K>) -> Option<SerializedDepNodeIndex> {
-self.index.get(dep_node.kind.to_u16() as usize)?.get(&dep_node.hash).cloned()
+pub fn node_to_index_opt(&self, dep_node: &DepNode) -> Option<SerializedDepNodeIndex> {
+self.index.get(dep_node.kind.as_usize())?.get(&dep_node.hash).cloned()
}
#[inline]
@@ -184,11 +184,9 @@ fn mask(bits: usize) -> usize {
usize::MAX >> ((std::mem::size_of::<usize>() * 8) - bits)
}
-impl<'a, K: DepKind + Decodable<MemDecoder<'a>>> Decodable<MemDecoder<'a>>
-for SerializedDepGraph<K>
-{
+impl SerializedDepGraph {
#[instrument(level = "debug", skip(d))]
-fn decode(d: &mut MemDecoder<'a>) -> SerializedDepGraph<K> {
+pub fn decode<D: Deps>(d: &mut MemDecoder<'_>) -> SerializedDepGraph {
// The last 16 bytes are the node count and edge count.
debug!("position: {:?}", d.position());
let (node_count, edge_count) =
@@ -217,14 +215,14 @@ impl<'a, K: DepKind + Decodable<MemDecoder<'a>>> Decodable<MemDecoder<'a>>
// least (34 byte header + 1 byte len + 64 bytes edge data), which is ~1%. A 2-byte leb128
// length is about the same fractional overhead and it amortizes for yet greater lengths.
let mut edge_list_data = Vec::with_capacity(
-graph_bytes - node_count * std::mem::size_of::<SerializedNodeHeader<K>>(),
+graph_bytes - node_count * std::mem::size_of::<SerializedNodeHeader<D>>(),
);
for _index in 0..node_count {
// Decode the header for this edge; the header packs together as many of the fixed-size
// fields as possible to limit the number of times we update decoder state.
let node_header =
-SerializedNodeHeader::<K> { bytes: d.read_array(), _marker: PhantomData };
+SerializedNodeHeader::<D> { bytes: d.read_array(), _marker: PhantomData };
let _i: SerializedDepNodeIndex = nodes.push(node_header.node());
debug_assert_eq!(_i.index(), _index);
@@ -256,12 +254,12 @@ impl<'a, K: DepKind + Decodable<MemDecoder<'a>>> Decodable<MemDecoder<'a>>
edge_list_data.extend(&[0u8; DEP_NODE_PAD]);
// Read the number of each dep kind and use it to create a hash map with a suitable size.
-let mut index: Vec<_> = (0..(K::MAX as usize + 1))
+let mut index: Vec<_> = (0..(D::DEP_KIND_MAX + 1))
.map(|_| UnhashMap::with_capacity_and_hasher(d.read_u32() as usize, Default::default()))
.collect();
for (idx, node) in nodes.iter_enumerated() {
-index[node.kind.to_u16() as usize].insert(node.hash, idx);
+index[node.kind.as_usize()].insert(node.hash, idx);
}
SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data, index }
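decode is now an inherent function that is generic over D: Deps instead of a Decodable impl parameterized by K. The only piece of Deps visible in this file is the DEP_KIND_MAX constant, which replaces K::MAX when sizing per-kind tables. A hedged sketch of that slice of the trait (the real trait, defined alongside DepKind, may carry more members):

    // Sketch of the part of the Deps trait this file relies on.
    pub trait Deps {
        /// Largest u16 discriminant of any dep kind; per-kind tables
        /// therefore hold DEP_KIND_MAX + 1 entries.
        const DEP_KIND_MAX: u16;
    }

With that, (0..(D::DEP_KIND_MAX + 1)) allocates one UnhashMap per kind, each node is bucketed into index[node.kind.as_usize()], and node_to_index_opt reads the same buckets back.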
@@ -276,20 +274,20 @@ impl<'a, K: DepKind + Decodable<MemDecoder<'a>>> Decodable<MemDecoder<'a>>
/// * The `DepKind`'s discriminant (a u16, but not all bits are used...)
/// * The byte width of the encoded edges for this node
/// * In whatever bits remain, the length of the edge list for this node, if it fits
-struct SerializedNodeHeader<K> {
+struct SerializedNodeHeader<D> {
// 2 bytes for the DepNode
// 16 for Fingerprint in DepNode
// 16 for Fingerprint in NodeInfo
bytes: [u8; 34],
-_marker: PhantomData<K>,
+_marker: PhantomData<D>,
}
// The fields of a `SerializedNodeHeader`, this struct is an implementation detail and exists only
// to make the implementation of `SerializedNodeHeader` simpler.
-struct Unpacked<K> {
+struct Unpacked {
len: Option<usize>,
bytes_per_index: usize,
-kind: K,
+kind: DepKind,
hash: PackedFingerprint,
fingerprint: Fingerprint,
}
@@ -301,20 +299,20 @@ struct Unpacked<K> {
// 0..M length of the edge
// M..M+N bytes per index
// M+N..16 kind
-impl<K: DepKind> SerializedNodeHeader<K> {
-const TOTAL_BITS: usize = std::mem::size_of::<K>() * 8;
+impl<D: Deps> SerializedNodeHeader<D> {
+const TOTAL_BITS: usize = std::mem::size_of::<DepKind>() * 8;
const LEN_BITS: usize = Self::TOTAL_BITS - Self::KIND_BITS - Self::WIDTH_BITS;
const WIDTH_BITS: usize = DEP_NODE_WIDTH_BITS;
-const KIND_BITS: usize = Self::TOTAL_BITS - K::MAX.leading_zeros() as usize;
+const KIND_BITS: usize = Self::TOTAL_BITS - D::DEP_KIND_MAX.leading_zeros() as usize;
const MAX_INLINE_LEN: usize = (u16::MAX as usize >> (Self::TOTAL_BITS - Self::LEN_BITS)) - 1;
#[inline]
-fn new(node_info: &NodeInfo<K>) -> Self {
+fn new(node_info: &NodeInfo) -> Self {
debug_assert_eq!(Self::TOTAL_BITS, Self::LEN_BITS + Self::WIDTH_BITS + Self::KIND_BITS);
let NodeInfo { node, fingerprint, edges } = node_info;
-let mut head = node.kind.to_u16();
+let mut head = node.kind.as_inner();
let free_bytes = edges.max_index().leading_zeros() as usize / 8;
let bytes_per_index = (DEP_NODE_SIZE - free_bytes).saturating_sub(1);
@@ -347,7 +345,7 @@ impl<K: DepKind> SerializedNodeHeader<K> {
}
#[inline]
-fn unpack(&self) -> Unpacked<K> {
+fn unpack(&self) -> Unpacked {
let head = u16::from_le_bytes(self.bytes[..2].try_into().unwrap());
let hash = self.bytes[2..18].try_into().unwrap();
let fingerprint = self.bytes[18..].try_into().unwrap();
@@ -359,7 +357,7 @@ impl<K: DepKind> SerializedNodeHeader<K> {
Unpacked {
len: len.checked_sub(1),
bytes_per_index: bytes_per_index as usize + 1,
-kind: DepKind::from_u16(kind),
+kind: DepKind::new(kind),
hash: Fingerprint::from_le_bytes(hash).into(),
fingerprint: Fingerprint::from_le_bytes(fingerprint),
}
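The 16-bit head word unpacked above packs three fields. Judging from new() earlier in this impl, the kind's raw u16 sits in the low KIND_BITS, the edge-index byte width in the next WIDTH_BITS, and, when it fits, the edge-list length plus one in the remaining high bits, with zero meaning the length is stored out of line. A self-contained sketch of that packing with example bit widths only (the real widths are derived from DEP_KIND_MAX and DEP_NODE_WIDTH_BITS, and the file's own mask helper plays the role of mask here):

    // Illustrative widths: 11 bits of kind, 2 of width, 3 of length.
    const KIND_BITS: u32 = 11;
    const WIDTH_BITS: u32 = 2;

    fn mask(bits: u32) -> u16 {
        (1u16 << bits) - 1
    }

    fn pack(kind: u16, bytes_per_index: u16, inline_len: Option<u16>) -> u16 {
        let mut head = kind;
        head |= bytes_per_index << KIND_BITS;
        // A zero length field means "length did not fit inline".
        if let Some(len) = inline_len {
            head |= (len + 1) << (KIND_BITS + WIDTH_BITS);
        }
        head
    }

    fn unpack(head: u16) -> (u16, u16, Option<u16>) {
        let kind = head & mask(KIND_BITS);
        let bytes_per_index = (head >> KIND_BITS) & mask(WIDTH_BITS);
        let len = head >> (KIND_BITS + WIDTH_BITS);
        (kind, bytes_per_index, len.checked_sub(1))
    }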
@@ -381,7 +379,7 @@ impl<K: DepKind> SerializedNodeHeader<K> {
}
#[inline]
-fn node(&self) -> DepNode<K> {
+fn node(&self) -> DepNode {
let Unpacked { kind, hash, .. } = self.unpack();
DepNode { kind, hash }
}
@@ -395,15 +393,15 @@ impl<K: DepKind> SerializedNodeHeader<K> {
}
#[derive(Debug)]
-struct NodeInfo<K: DepKind> {
-node: DepNode<K>,
+struct NodeInfo {
+node: DepNode,
fingerprint: Fingerprint,
edges: EdgesVec,
}
-impl<K: DepKind> Encodable<FileEncoder> for NodeInfo<K> {
-fn encode(&self, e: &mut FileEncoder) {
-let header = SerializedNodeHeader::new(self);
+impl NodeInfo {
+fn encode<D: Deps>(&self, e: &mut FileEncoder) {
+let header = SerializedNodeHeader::<D>::new(self);
e.write_array(header.bytes);
if header.len().is_none() {
@@ -420,41 +418,43 @@ impl<K: DepKind> Encodable<FileEncoder> for NodeInfo<K> {
}
}
-struct Stat<K: DepKind> {
-kind: K,
+struct Stat {
+kind: DepKind,
node_counter: u64,
edge_counter: u64,
}
-struct EncoderState<K: DepKind> {
+struct EncoderState<D: Deps> {
encoder: FileEncoder,
total_node_count: usize,
total_edge_count: usize,
-stats: Option<FxHashMap<K, Stat<K>>>,
+stats: Option<FxHashMap<DepKind, Stat>>,
/// Stores the number of times we've encoded each dep kind.
kind_stats: Vec<u32>,
+marker: PhantomData<D>,
}
-impl<K: DepKind> EncoderState<K> {
+impl<D: Deps> EncoderState<D> {
fn new(encoder: FileEncoder, record_stats: bool) -> Self {
Self {
encoder,
total_edge_count: 0,
total_node_count: 0,
stats: record_stats.then(FxHashMap::default),
-kind_stats: iter::repeat(0).take(K::MAX as usize + 1).collect(),
+kind_stats: iter::repeat(0).take(D::DEP_KIND_MAX as usize + 1).collect(),
+marker: PhantomData,
}
}
fn encode_node(
&mut self,
-node: &NodeInfo<K>,
-record_graph: &Option<Lock<DepGraphQuery<K>>>,
+node: &NodeInfo,
+record_graph: &Option<Lock<DepGraphQuery>>,
) -> DepNodeIndex {
let index = DepNodeIndex::new(self.total_node_count);
self.total_node_count += 1;
-self.kind_stats[node.node.kind.to_u16() as usize] += 1;
+self.kind_stats[node.node.kind.as_usize()] += 1;
let edge_count = node.edges.len();
self.total_edge_count += edge_count;
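The kind_stats counter indexed by node.node.kind.as_usize() is what lets the decoder pre-size its per-kind maps: finish() below destructures the accumulated kind_stats (presumably writing one count per kind), and decode reads one u32 back per kind in the d.read_u32() loop before bucketing nodes. A rough sketch of that handshake, with a plain byte buffer standing in for the real FileEncoder/MemDecoder:

    // Sketch: per-kind node counts written by the encoder, read back by the decoder.
    fn write_kind_stats(out: &mut Vec<u8>, kind_stats: &[u32]) {
        for &count in kind_stats {
            out.extend_from_slice(&count.to_le_bytes());
        }
    }

    fn read_kind_stats(bytes: &[u8], dep_kind_max: usize) -> Vec<u32> {
        (0..=dep_kind_max)
            .map(|i| u32::from_le_bytes(bytes[i * 4..i * 4 + 4].try_into().unwrap()))
            .collect()
    }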
@@ -475,12 +475,19 @@ impl<K: DepKind> EncoderState<K> {
}
let encoder = &mut self.encoder;
-node.encode(encoder);
+node.encode::<D>(encoder);
index
}
fn finish(self, profiler: &SelfProfilerRef) -> FileEncodeResult {
-let Self { mut encoder, total_node_count, total_edge_count, stats: _, kind_stats } = self;
+let Self {
+mut encoder,
+total_node_count,
+total_edge_count,
+stats: _,
+kind_stats,
+marker: _,
+} = self;
let node_count = total_node_count.try_into().unwrap();
let edge_count = total_edge_count.try_into().unwrap();
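These two counts become the footer that decode reads first ("The last 16 bytes are the node count and edge count" at the top of the file). A sketch of that layout, assuming two fixed-size little-endian u64 values; the actual write goes through FileEncoder and is not shown in this hunk:

    // Assumed footer layout: node count then edge count, 8 bytes each.
    fn footer(node_count: u64, edge_count: u64) -> [u8; 16] {
        let mut bytes = [0u8; 16];
        bytes[..8].copy_from_slice(&node_count.to_le_bytes());
        bytes[8..].copy_from_slice(&edge_count.to_le_bytes());
        bytes
    }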
@@ -506,12 +513,12 @@ impl<K: DepKind> EncoderState<K> {
}
}
-pub struct GraphEncoder<K: DepKind> {
-status: Lock<EncoderState<K>>,
-record_graph: Option<Lock<DepGraphQuery<K>>>,
+pub struct GraphEncoder<D: Deps> {
+status: Lock<EncoderState<D>>,
+record_graph: Option<Lock<DepGraphQuery>>,
}
-impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
+impl<D: Deps> GraphEncoder<D> {
pub fn new(
encoder: FileEncoder,
prev_node_count: usize,
@@ -523,7 +530,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
GraphEncoder { status, record_graph }
}
-pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery<K>)) {
+pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery)) {
if let Some(record_graph) = &self.record_graph {
f(&record_graph.lock())
}
@@ -584,7 +591,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
pub(crate) fn send(
&self,
profiler: &SelfProfilerRef,
-node: DepNode<K>,
+node: DepNode,
fingerprint: Fingerprint,
edges: EdgesVec,
) -> DepNodeIndex {