Rename rustc_mir to rustc_const_eval.

Camille GILLOT
2021-01-05 20:08:11 +01:00
parent fd9c04fe32
commit c5fc2609f0
64 changed files with 66 additions and 66 deletions


@@ -0,0 +1,210 @@
use std::error::Error;
use std::fmt;
use rustc_errors::{DiagnosticBuilder, ErrorReported};
use rustc_hir as hir;
use rustc_middle::mir::AssertKind;
use rustc_middle::ty::{layout::LayoutError, query::TyCtxtAt, ConstInt};
use rustc_span::{Span, Symbol};
use super::InterpCx;
use crate::interpret::{
struct_error, ErrorHandled, FrameInfo, InterpError, InterpErrorInfo, Machine, MachineStopType,
};
/// The CTFE machine has some custom error kinds.
#[derive(Clone, Debug)]
pub enum ConstEvalErrKind {
NeedsRfc(String),
ConstAccessesStatic,
ModifiedGlobal,
AssertFailure(AssertKind<ConstInt>),
Panic { msg: Symbol, line: u32, col: u32, file: Symbol },
Abort(String),
}
impl MachineStopType for ConstEvalErrKind {
fn is_hard_err(&self) -> bool {
match self {
Self::Panic { .. } => true,
_ => false,
}
}
}
// The errors become `MachineStop` with plain strings when being raised.
// `ConstEvalErr` (in `librustc_middle/mir/interpret/error.rs`) knows to
// handle these.
impl<'tcx> Into<InterpErrorInfo<'tcx>> for ConstEvalErrKind {
fn into(self) -> InterpErrorInfo<'tcx> {
err_machine_stop!(self).into()
}
}
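// Editorial sketch (not part of the original commit): how this conversion is used
// in practice. A machine hook that wants to stop evaluation builds an error kind
// and lets `into()` wrap it as `InterpError::MachineStop`. The helper below is
// hypothetical; only the conversion itself comes from the code above.
#[allow(dead_code)]
fn reject_static_access_example<'tcx>() -> Result<(), InterpErrorInfo<'tcx>> {
// `into()` routes through `err_machine_stop!`, producing a `MachineStop` error.
Err(ConstEvalErrKind::ConstAccessesStatic.into())
}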
impl fmt::Display for ConstEvalErrKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use self::ConstEvalErrKind::*;
match *self {
NeedsRfc(ref msg) => {
write!(f, "\"{}\" needs an rfc before being allowed inside constants", msg)
}
ConstAccessesStatic => write!(f, "constant accesses static"),
ModifiedGlobal => {
write!(f, "modifying a static's initial value from another static's initializer")
}
AssertFailure(ref msg) => write!(f, "{:?}", msg),
Panic { msg, line, col, file } => {
write!(f, "the evaluated program panicked at '{}', {}:{}:{}", msg, file, line, col)
}
Abort(ref msg) => write!(f, "{}", msg),
}
}
}
impl Error for ConstEvalErrKind {}
/// When const-evaluation errors, this type is constructed with the resulting information,
/// and then used to emit the error as a lint or hard error.
#[derive(Debug)]
pub struct ConstEvalErr<'tcx> {
pub span: Span,
pub error: InterpError<'tcx>,
pub stacktrace: Vec<FrameInfo<'tcx>>,
}
impl<'tcx> ConstEvalErr<'tcx> {
/// Turn an interpreter error into something to report to the user.
/// As a side-effect, if RUSTC_CTFE_BACKTRACE is set, this prints the backtrace.
/// Should be called only if the error is actually going to be reported!
pub fn new<'mir, M: Machine<'mir, 'tcx>>(
ecx: &InterpCx<'mir, 'tcx, M>,
error: InterpErrorInfo<'tcx>,
span: Option<Span>,
) -> ConstEvalErr<'tcx>
where
'tcx: 'mir,
{
error.print_backtrace();
let stacktrace = ecx.generate_stacktrace();
ConstEvalErr {
error: error.into_kind(),
stacktrace,
span: span.unwrap_or_else(|| ecx.cur_span()),
}
}
pub fn struct_error(
&self,
tcx: TyCtxtAt<'tcx>,
message: &str,
emit: impl FnOnce(DiagnosticBuilder<'_>),
) -> ErrorHandled {
self.struct_generic(tcx, message, emit, None)
}
pub fn report_as_error(&self, tcx: TyCtxtAt<'tcx>, message: &str) -> ErrorHandled {
self.struct_error(tcx, message, |mut e| e.emit())
}
pub fn report_as_lint(
&self,
tcx: TyCtxtAt<'tcx>,
message: &str,
lint_root: hir::HirId,
span: Option<Span>,
) -> ErrorHandled {
self.struct_generic(
tcx,
message,
|mut lint: DiagnosticBuilder<'_>| {
// Apply the span.
if let Some(span) = span {
let primary_spans = lint.span.primary_spans().to_vec();
// point at the actual error as the primary span
lint.replace_span_with(span);
// point to the `const` statement as a secondary span
// they don't have any label
for sp in primary_spans {
if sp != span {
lint.span_label(sp, "");
}
}
}
lint.emit();
},
Some(lint_root),
)
}
/// Create a diagnostic for this const eval error.
///
/// Sets the message passed in via `message` and adds span labels with detailed error
/// information before handing control back to `emit` to do any final processing.
/// It's the caller's responsibility to call emit(), stash(), etc. within the `emit`
/// function to dispose of the diagnostic properly.
///
/// If `lint_root.is_some()` report it as a lint, else report it as a hard error.
/// (Except that for some errors, we ignore all that -- see `must_error` below.)
fn struct_generic(
&self,
tcx: TyCtxtAt<'tcx>,
message: &str,
emit: impl FnOnce(DiagnosticBuilder<'_>),
lint_root: Option<hir::HirId>,
) -> ErrorHandled {
let finish = |mut err: DiagnosticBuilder<'_>, span_msg: Option<String>| {
trace!("reporting const eval failure at {:?}", self.span);
if let Some(span_msg) = span_msg {
err.span_label(self.span, span_msg);
}
// Add spans for the stacktrace. Don't print a single-line backtrace though.
if self.stacktrace.len() > 1 {
for frame_info in &self.stacktrace {
err.span_label(frame_info.span, frame_info.to_string());
}
}
// Let the caller finish the job.
emit(err)
};
// Special handling for certain errors
match &self.error {
// Don't emit a new diagnostic for these errors
err_inval!(Layout(LayoutError::Unknown(_))) | err_inval!(TooGeneric) => {
return ErrorHandled::TooGeneric;
}
err_inval!(AlreadyReported(error_reported)) => {
return ErrorHandled::Reported(*error_reported);
}
err_inval!(Layout(LayoutError::SizeOverflow(_))) => {
// We must *always* hard error on these, even if the caller wants just a lint.
// The `message` makes little sense here, this is a more serious error than the
// caller thinks anyway.
// See <https://github.com/rust-lang/rust/pull/63152>.
finish(struct_error(tcx, &self.error.to_string()), None);
return ErrorHandled::Reported(ErrorReported);
}
_ => {}
};
let err_msg = self.error.to_string();
// Regular case: emit a lint if we have a lint root, otherwise a hard error.
if let Some(lint_root) = lint_root {
// Report as lint.
let hir_id =
self.stacktrace.iter().rev().find_map(|frame| frame.lint_root).unwrap_or(lint_root);
tcx.struct_span_lint_hir(
rustc_session::lint::builtin::CONST_ERR,
hir_id,
tcx.span,
|lint| finish(lint.build(message), Some(err_msg)),
);
ErrorHandled::Linted
} else {
// Report as hard error.
finish(struct_error(tcx, message), Some(err_msg));
ErrorHandled::Reported(ErrorReported)
}
}
}
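// Editorial sketch (not part of the original commit): the two reporting paths
// above, side by side. The messages mirror the ones used by
// `eval_to_allocation_raw_provider` elsewhere in this commit; the helper itself
// is hypothetical.
#[allow(dead_code)]
fn report_example<'tcx>(
err: &ConstEvalErr<'tcx>,
tcx: TyCtxtAt<'tcx>,
lint_root: Option<hir::HirId>,
) -> ErrorHandled {
match lint_root {
// Reachable from HIR with a lint root: downgrade to the `const_err` lint.
Some(root) => {
err.report_as_lint(tcx, "any use of this value will cause an error", root, None)
}
// No lint root (e.g. a broken constant from another crate): hard error.
None => err.report_as_error(tcx, "evaluation of constant value failed"),
}
}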


@@ -0,0 +1,399 @@
use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr, MemoryExtra};
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
ScalarMaybeUninit, StackPopCleanup,
};
use rustc_errors::ErrorReported;
use rustc_hir::def::DefKind;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::mir::pretty::display_allocation;
use rustc_middle::traits::Reveal;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, subst::Subst, TyCtxt};
use rustc_span::source_map::Span;
use rustc_target::abi::Abi;
use std::borrow::Cow;
use std::convert::TryInto;
pub fn note_on_undefined_behavior_error() -> &'static str {
"The rules on what exactly is undefined behavior aren't clear, \
so this check might be overzealous. Please open an issue on the rustc \
repository if you believe it should not be considered undefined behavior."
}
// Returns a pointer to where the result lives
fn eval_body_using_ecx<'mir, 'tcx>(
ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
cid: GlobalId<'tcx>,
body: &'mir mir::Body<'tcx>,
) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
let tcx = *ecx.tcx;
assert!(
cid.promoted.is_some()
|| matches!(
ecx.tcx.def_kind(cid.instance.def_id()),
DefKind::Const
| DefKind::Static
| DefKind::ConstParam
| DefKind::AnonConst
| DefKind::AssocConst
),
"Unexpected DefKind: {:?}",
ecx.tcx.def_kind(cid.instance.def_id())
);
let layout = ecx.layout_of(body.return_ty().subst(tcx, cid.instance.substs))?;
assert!(!layout.is_unsized());
let ret = ecx.allocate(layout, MemoryKind::Stack)?;
let name =
with_no_trimmed_paths(|| ty::tls::with(|tcx| tcx.def_path_str(cid.instance.def_id())));
let prom = cid.promoted.map_or_else(String::new, |p| format!("::promoted[{:?}]", p));
trace!("eval_body_using_ecx: pushing stack frame for global: {}{}", name, prom);
ecx.push_stack_frame(
cid.instance,
body,
Some(&ret.into()),
StackPopCleanup::None { cleanup: false },
)?;
// The main interpreter loop.
ecx.run()?;
// Intern the result
let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
debug!("eval_body_using_ecx done: {:?}", *ret);
Ok(ret)
}
/// The `InterpCx` is only meant to be used to do field and index projections into constants for
/// `simd_shuffle` and const patterns in match arms.
///
/// The function containing the `match` that is currently being analyzed may have generic bounds
/// that inform us about the generic bounds of the constant. E.g., using an associated constant
/// of a function's generic parameter will require knowledge about the bounds on the generic
/// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
pub(super) fn mk_eval_cx<'mir, 'tcx>(
tcx: TyCtxt<'tcx>,
root_span: Span,
param_env: ty::ParamEnv<'tcx>,
can_access_statics: bool,
) -> CompileTimeEvalContext<'mir, 'tcx> {
debug!("mk_eval_cx: {:?}", param_env);
InterpCx::new(
tcx,
root_span,
param_env,
CompileTimeInterpreter::new(tcx.const_eval_limit()),
MemoryExtra { can_access_statics },
)
}
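// Editorial sketch (not part of the original commit): a typical caller, mirroring
// `destructure_const` elsewhere in this commit. `can_access_statics` is `false`
// because values flowing into the type system must never observe mutable memory.
#[allow(dead_code)]
fn mk_eval_cx_example<'mir, 'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> CompileTimeEvalContext<'mir, 'tcx> {
mk_eval_cx(tcx, rustc_span::DUMMY_SP, param_env, /* can_access_statics */ false)
}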
/// This function converts an interpreter value into a constant that is meant for use in the
/// type system.
pub(super) fn op_to_const<'tcx>(
ecx: &CompileTimeEvalContext<'_, 'tcx>,
op: &OpTy<'tcx>,
) -> ConstValue<'tcx> {
// We do not have value optimizations for everything.
// Only scalars and slices, since they are very common.
// Note that further down we turn scalars of uninitialized bits back to `ByRef`. These can result
// from scalar unions that are initialized with one of their zero-sized variants. We could
// instead allow `ConstValue::Scalar` to store `ScalarMaybeUninit`, but that would affect all
// the usual cases of extracting e.g. a `usize`, without there being a real use case for the
// `Undef` situation.
let try_as_immediate = match op.layout.abi {
Abi::Scalar(..) => true,
Abi::ScalarPair(..) => match op.layout.ty.kind() {
ty::Ref(_, inner, _) => match *inner.kind() {
ty::Slice(elem) => elem == ecx.tcx.types.u8,
ty::Str => true,
_ => false,
},
_ => false,
},
_ => false,
};
let immediate = if try_as_immediate {
Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
} else {
// It is guaranteed that any non-slice scalar pair is actually ByRef here.
// When we come back from raw const eval, we are always by-ref. The only way our op here is
// by-val is if we are in destructure_const, i.e., if this is (a field of) something that we
// "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
// structs containing such.
op.try_as_mplace()
};
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let to_const_value = |mplace: &MPlaceTy<'_>| match mplace.ptr.into_parts() {
(Some(alloc_id), offset) => {
let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
ConstValue::ByRef { alloc, offset }
}
(None, offset) => {
assert!(mplace.layout.is_zst());
assert_eq!(
offset.bytes() % mplace.layout.align.abi.bytes(),
0,
"this MPlaceTy must come from a validated constant, thus we can assume the \
alignment is correct",
);
ConstValue::Scalar(Scalar::ZST)
}
};
match immediate {
Ok(ref mplace) => to_const_value(mplace),
// see comment on `let try_as_immediate` above
Err(imm) => match *imm {
Immediate::Scalar(x) => match x {
ScalarMaybeUninit::Scalar(s) => ConstValue::Scalar(s),
ScalarMaybeUninit::Uninit => to_const_value(&op.assert_mem_place()),
},
Immediate::ScalarPair(a, b) => {
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (data, start) = match ecx.scalar_to_ptr(a.check_init().unwrap()).into_parts() {
(Some(alloc_id), offset) => {
(ecx.tcx.global_alloc(alloc_id).unwrap_memory(), offset.bytes())
}
(None, _offset) => (
ecx.tcx.intern_const_alloc(Allocation::from_bytes_byte_aligned_immutable(
b"" as &[u8],
)),
0,
),
};
let len = b.to_machine_usize(ecx).unwrap();
let start = start.try_into().unwrap();
let len: usize = len.try_into().unwrap();
ConstValue::Slice { data, start, end: start + len }
}
},
}
}
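// Editorial note (not part of the original commit): the shapes produced above, for
// reference:
//   scalar layouts (integers, bools, chars, thin refs) => ConstValue::Scalar(..)
//   &str and &[u8] references                          => ConstValue::Slice { data, start, end }
//   everything else (aggregates, unions, wide data)    => ConstValue::ByRef { alloc, offset }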
fn turn_into_const_value<'tcx>(
tcx: TyCtxt<'tcx>,
constant: ConstAlloc<'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ConstValue<'tcx> {
let cid = key.value;
let def_id = cid.instance.def.def_id();
let is_static = tcx.is_static(def_id);
let ecx = mk_eval_cx(tcx, tcx.def_span(key.value.instance.def_id()), key.param_env, is_static);
let mplace = ecx.raw_const_to_mplace(constant).expect(
"can only fail if layout computation failed, \
which should have given a good error before ever invoking this function",
);
assert!(
!is_static || cid.promoted.is_some(),
"the `eval_to_const_value_raw` query should not be used for statics, use `eval_to_allocation` instead"
);
// Turn this into a proper constant.
op_to_const(&ecx, &mplace.into())
}
pub fn eval_to_const_value_raw_provider<'tcx>(
tcx: TyCtxt<'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc_middle::mir::interpret::EvalToConstValueResult<'tcx> {
// see comment in eval_to_allocation_raw_provider for what we're doing here
if key.param_env.reveal() == Reveal::All {
let mut key = key;
key.param_env = key.param_env.with_user_facing();
match tcx.eval_to_const_value_raw(key) {
// try again with reveal all as requested
Err(ErrorHandled::TooGeneric) => {}
// deduplicate calls
other => return other,
}
}
// We call `const_eval` for zero-arg intrinsics, too, in order to cache their value.
// Catch such calls and evaluate them instead of trying to load a constant's MIR.
if let ty::InstanceDef::Intrinsic(def_id) = key.value.instance.def {
let ty = key.value.instance.ty(tcx, key.param_env);
let substs = match ty.kind() {
ty::FnDef(_, substs) => substs,
_ => bug!("intrinsic with type {:?}", ty),
};
return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs).map_err(|error| {
let span = tcx.def_span(def_id);
let error = ConstEvalErr { error: error.into_kind(), stacktrace: vec![], span };
error.report_as_error(tcx.at(span), "could not evaluate nullary intrinsic")
});
}
tcx.eval_to_allocation_raw(key).map(|val| turn_into_const_value(tcx, val, key))
}
pub fn eval_to_allocation_raw_provider<'tcx>(
tcx: TyCtxt<'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
// Because the constant is computed twice (once per value of `Reveal`), we are at risk of
// reporting the same error twice here. To resolve this, we check whether we can evaluate the
// constant in the more restrictive `Reveal::UserFacing`, which most likely already was
// computed. For a large percentage of constants that will already have succeeded. Only
// associated constants of generic functions will fail due to not enough monomorphization
// information being available.
// In case we fail in the `UserFacing` variant, we just do the real computation.
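// Editorial sketch (not part of the original commit): the resulting query cascade
// for a `Reveal::All` key:
//   eval_to_allocation_raw(Reveal::All)
//     -> eval_to_allocation_raw(Reveal::UserFacing)  // usually already cached
//          Ok(..) | Err(Reported(..)) => reuse that result, done
//          Err(TooGeneric)            => fall through, evaluate with Reveal::All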
if key.param_env.reveal() == Reveal::All {
let mut key = key;
key.param_env = key.param_env.with_user_facing();
match tcx.eval_to_allocation_raw(key) {
// try again with reveal all as requested
Err(ErrorHandled::TooGeneric) => {}
// deduplicate calls
other => return other,
}
}
if cfg!(debug_assertions) {
// Make sure we format the instance even if we do not print it.
// This serves as a regression test against an ICE on printing.
// The next two lines concatenated contain some discussion:
// https://rust-lang.zulipchat.com/#narrow/stream/146212-t-compiler.2Fconst-eval/
// subject/anon_const_instance_printing/near/135980032
let instance = with_no_trimmed_paths(|| key.value.instance.to_string());
trace!("const eval: {:?} ({})", key, instance);
}
let cid = key.value;
let def = cid.instance.def.with_opt_param();
if let Some(def) = def.as_local() {
if tcx.has_typeck_results(def.did) {
if let Some(error_reported) = tcx.typeck_opt_const_arg(def).tainted_by_errors {
return Err(ErrorHandled::Reported(error_reported));
}
}
if !tcx.is_mir_available(def.did) {
tcx.sess.delay_span_bug(
tcx.def_span(def.did),
&format!("no MIR body is available for {:?}", def.did),
);
return Err(ErrorHandled::Reported(ErrorReported {}));
}
if let Some(error_reported) = tcx.mir_const_qualif_opt_const_arg(def).error_occured {
return Err(ErrorHandled::Reported(error_reported));
}
}
let is_static = tcx.is_static(def.did);
let mut ecx = InterpCx::new(
tcx,
tcx.def_span(def.did),
key.param_env,
CompileTimeInterpreter::new(tcx.const_eval_limit()),
// Statics (and promoteds inside statics) may access other statics, because unlike consts
// they do not have to behave "as if" they were evaluated at runtime.
MemoryExtra { can_access_statics: is_static },
);
let res = ecx.load_mir(cid.instance.def, cid.promoted);
match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body)) {
Err(error) => {
let err = ConstEvalErr::new(&ecx, error, None);
// Some CTFE errors raise just a lint, not a hard error; see
// <https://github.com/rust-lang/rust/issues/71800>.
let is_hard_err = if let Some(def) = def.as_local() {
// (Associated) consts only emit a lint, since they might be unused.
!matches!(tcx.def_kind(def.did.to_def_id()), DefKind::Const | DefKind::AssocConst)
// check if the inner InterpError is hard
|| err.error.is_hard_err()
} else {
// use of broken constant from other crate: always an error
true
};
if is_hard_err {
let msg = if is_static {
Cow::from("could not evaluate static initializer")
} else {
// If the current item has generics, we'd like to enrich the message with the
// instance and its substs, so that the actual compile-time values are shown in
// addition to the expression that led to the const-eval error.
let instance = &key.value.instance;
if !instance.substs.is_empty() {
let instance = with_no_trimmed_paths(|| instance.to_string());
let msg = format!("evaluation of `{}` failed", instance);
Cow::from(msg)
} else {
Cow::from("evaluation of constant value failed")
}
};
Err(err.report_as_error(ecx.tcx.at(ecx.cur_span()), &msg))
} else {
let hir_id = tcx.hir().local_def_id_to_hir_id(def.as_local().unwrap().did);
Err(err.report_as_lint(
tcx.at(tcx.def_span(def.did)),
"any use of this value will cause an error",
hir_id,
Some(err.span),
))
}
}
Ok(mplace) => {
// Since evaluation had no errors, validate the resulting constant.
// This is a separate `try` block to provide more targeted error reporting.
let validation = try {
let mut ref_tracking = RefTracking::new(mplace);
let mut inner = false;
while let Some((mplace, path)) = ref_tracking.todo.pop() {
let mode = match tcx.static_mutability(cid.instance.def_id()) {
Some(_) if cid.promoted.is_some() => {
// Promoteds in statics are allowed to point to statics.
CtfeValidationMode::Const { inner, allow_static_ptrs: true }
}
Some(_) => CtfeValidationMode::Regular, // a `static`
None => CtfeValidationMode::Const { inner, allow_static_ptrs: false },
};
ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, mode)?;
inner = true;
}
};
let alloc_id = mplace.ptr.provenance.unwrap();
if let Err(error) = validation {
// Validation failed, report an error. This is always a hard error.
let err = ConstEvalErr::new(&ecx, error, None);
Err(err.struct_error(
ecx.tcx,
"it is undefined behavior to use this value",
|mut diag| {
diag.note(note_on_undefined_behavior_error());
diag.note(&format!(
"the raw bytes of the constant ({}",
display_allocation(
*ecx.tcx,
ecx.tcx.global_alloc(alloc_id).unwrap_memory()
)
));
diag.emit();
},
))
} else {
// Convert to raw constant
Ok(ConstAlloc { alloc_id, ty: mplace.layout.ty })
}
}
}
}


@@ -0,0 +1,112 @@
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::hir::map::blocks::FnLikeNode;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::Symbol;
use rustc_target::spec::abi::Abi;
/// Whether the `def_id` counts as const fn in your current crate, considering all active
/// feature gates
pub fn is_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
tcx.is_const_fn_raw(def_id)
&& match is_unstable_const_fn(tcx, def_id) {
Some(feature_name) => {
// has a `rustc_const_unstable` attribute, check whether the user enabled the
// corresponding feature gate.
tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == feature_name)
}
// Functions without const stability are either stable user-written
// const fns, or the crate is using feature gates, in which case we
// don't care what they do.
None => true,
}
}
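// Editorial note (not part of the original commit): the gating above in table form.
//   is_const_fn_raw | rustc_const_unstable | feature declared | is_const_fn
//   ----------------+----------------------+------------------+------------
//   false           | (any)                | (any)            | false
//   true            | absent               | (any)            | true
//   true            | present              | no               | false
//   true            | present              | yes              | true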
/// Whether the `def_id` is an unstable const fn and what feature gate is necessary to enable it
pub fn is_unstable_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Symbol> {
if tcx.is_const_fn_raw(def_id) {
let const_stab = tcx.lookup_const_stability(def_id)?;
if const_stab.level.is_unstable() { Some(const_stab.feature) } else { None }
} else {
None
}
}
pub fn is_parent_const_impl_raw(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
let parent_id = tcx.hir().get_parent_did(hir_id);
if !parent_id.is_top_level_module() { is_const_impl_raw(tcx, parent_id) } else { false }
}
/// Checks whether the function has a `const` modifier or, in case it is an intrinsic, whether
/// said intrinsic has a `rustc_const_{un,}stable` attribute.
fn is_const_fn_raw(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
let node = tcx.hir().get(hir_id);
if let hir::Node::ForeignItem(hir::ForeignItem { kind: hir::ForeignItemKind::Fn(..), .. }) =
node
{
// Intrinsics use `rustc_const_{un,}stable` attributes to indicate constness. All other
// foreign items cannot be evaluated at compile-time.
if let Abi::RustIntrinsic | Abi::PlatformIntrinsic = tcx.hir().get_foreign_abi(hir_id) {
tcx.lookup_const_stability(def_id).is_some()
} else {
false
}
} else if let Some(fn_like) = FnLikeNode::from_node(node) {
if fn_like.constness() == hir::Constness::Const {
return true;
}
// If the function itself is not annotated with `const`, it may still be a `const fn`
// if it resides in a const trait impl.
is_parent_const_impl_raw(tcx, hir_id)
} else if let hir::Node::Ctor(_) = node {
true
} else {
false
}
}
/// Checks whether the given item is an `impl` that has a `const` modifier.
fn is_const_impl_raw(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
let node = tcx.hir().get(hir_id);
matches!(
node,
hir::Node::Item(hir::Item {
kind: hir::ItemKind::Impl(hir::Impl { constness: hir::Constness::Const, .. }),
..
})
)
}
fn is_promotable_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
is_const_fn(tcx, def_id)
&& match tcx.lookup_const_stability(def_id) {
Some(stab) => {
if cfg!(debug_assertions) && stab.promotable {
let sig = tcx.fn_sig(def_id);
assert_eq!(
sig.unsafety(),
hir::Unsafety::Normal,
"don't mark const unsafe fns as promotable",
// https://github.com/rust-lang/rust/pull/53851#issuecomment-418760682
);
}
stab.promotable
}
None => false,
}
}
pub fn provide(providers: &mut Providers) {
*providers = Providers {
is_const_fn_raw,
is_const_impl_raw: |tcx, def_id| is_const_impl_raw(tcx, def_id.expect_local()),
is_promotable_const_fn,
..*providers
};
}


@@ -0,0 +1,474 @@
use rustc_middle::mir;
use rustc_middle::ty::{self, Ty};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;
use rustc_data_structures::fx::FxHashMap;
use std::fmt;
use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};
use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi;
use crate::interpret::{
self, compile_time_machine, AllocId, Allocation, Frame, ImmTy, InterpCx, InterpResult, OpTy,
PlaceTy, Scalar, StackPopUnwind,
};
use super::error::*;
impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
/// "Intercept" a function call to a panic-related function
/// because we have something special to do for it.
/// If this returns `Ok(None)`, the function should just be evaluated normally;
/// `Ok(Some(instance))` means the given instance must be called instead.
fn hook_panic_fn(
&mut self,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
// The list of functions we handle here must be in sync with
// `is_lang_panic_fn` in `transform/check_consts/mod.rs`.
let def_id = instance.def_id();
if Some(def_id) == self.tcx.lang_items().panic_fn()
|| Some(def_id) == self.tcx.lang_items().panic_str()
|| Some(def_id) == self.tcx.lang_items().begin_panic_fn()
{
// &str
assert!(args.len() == 1);
let msg_place = self.deref_operand(&args[0])?;
let msg = Symbol::intern(self.read_str(&msg_place)?);
let span = self.find_closest_untracked_caller_location();
let (file, line, col) = self.location_triple_for_span(span);
return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
} else if Some(def_id) == self.tcx.lang_items().panic_fmt()
|| Some(def_id) == self.tcx.lang_items().begin_panic_fmt()
{
// For panic_fmt, call const_panic_fmt instead.
if let Some(const_panic_fmt) = self.tcx.lang_items().const_panic_fmt() {
return Ok(Some(
ty::Instance::resolve(
*self.tcx,
ty::ParamEnv::reveal_all(),
const_panic_fmt,
self.tcx.intern_substs(&[]),
)
.unwrap()
.unwrap(),
));
}
}
Ok(None)
}
}
/// Extra machine state for CTFE, and the Machine instance
pub struct CompileTimeInterpreter<'mir, 'tcx> {
/// For now, the number of terminators that can be evaluated before we throw a resource
/// exhaustion error.
///
/// Setting this to `0` disables the limit and allows the interpreter to run forever.
pub steps_remaining: usize,
/// The virtual call stack.
pub(crate) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
}
#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
/// We need to make sure consts never point to anything mutable, even recursively. That is
/// relied on for pattern matching on consts with references.
/// To achieve this, two pieces have to work together:
/// * Interning makes everything outside of statics immutable.
/// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
/// This boolean here controls the second part.
pub(super) can_access_statics: bool,
}
impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
pub(super) fn new(const_eval_limit: Limit) -> Self {
CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
}
}
impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
#[inline(always)]
fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
where
K: Borrow<Q>,
{
FxHashMap::contains_key(self, k)
}
#[inline(always)]
fn insert(&mut self, k: K, v: V) -> Option<V> {
FxHashMap::insert(self, k, v)
}
#[inline(always)]
fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
where
K: Borrow<Q>,
{
FxHashMap::remove(self, k)
}
#[inline(always)]
fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
}
#[inline(always)]
fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
match self.get(&k) {
Some(v) => Ok(v),
None => {
vacant()?;
bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
}
}
}
#[inline(always)]
fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
match self.entry(k) {
Entry::Occupied(e) => Ok(e.into_mut()),
Entry::Vacant(e) => {
let v = vacant()?;
Ok(e.insert(v))
}
}
}
}
crate type CompileTimeEvalContext<'mir, 'tcx> =
InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
Heap,
}
impl fmt::Display for MemoryKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
MemoryKind::Heap => write!(f, "heap allocation"),
}
}
}
impl interpret::MayLeak for MemoryKind {
#[inline(always)]
fn may_leak(self) -> bool {
match self {
MemoryKind::Heap => false,
}
}
}
impl interpret::MayLeak for ! {
#[inline(always)]
fn may_leak(self) -> bool {
// `self` is uninhabited
self
}
}
impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
match (a, b) {
// Comparisons between integers are always known.
(Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
// Equality between an integer and a pointer can never be known for sure.
(Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => false,
// FIXME: return `true` for when both sides are the same pointer, *except* that
// some things (like functions and vtables) do not have stable addresses
// so we need to be careful around them (see e.g. #73722).
(Scalar::Ptr(..), Scalar::Ptr(..)) => false,
}
}
fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
match (a, b) {
// Comparisons between integers are always known.
(Scalar::Int(_), Scalar::Int(_)) => a != b,
// Comparisons of an abstract pointer with a null pointer are known if the pointer
// is in bounds, because an in-bounds pointer can never be null.
// Inequality with integers other than null can never be known for sure.
(Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
int.is_null() && !self.memory.ptr_may_be_null(ptr.into())
}
// FIXME: return `true` for at least some comparisons where we can reliably
// determine the result of runtime inequality tests at compile-time.
// Examples include comparison of addresses in different static items.
(Scalar::Ptr(..), Scalar::Ptr(..)) => false,
}
}
}
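// Editorial note (not part of the original commit): what these answers mean at the
// language level, via the `ptr_guaranteed_{eq,ne}` intrinsics wired up in
// `call_intrinsic` below. `false` means "not known to hold", never "known false":
//   guaranteed_eq(p, p)            == false  // conservatively unknown for pointers
//   guaranteed_ne(null, in_bounds) == true   // in-bounds pointers are never null
//   guaranteed_eq(2, 2)            == true   // integer comparisons are exact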
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
compile_time_machine!(<'mir, 'tcx>);
type MemoryKind = MemoryKind;
type MemoryExtra = MemoryExtra;
const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error
fn load_mir(
ecx: &InterpCx<'mir, 'tcx, Self>,
instance: ty::InstanceDef<'tcx>,
) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
match instance {
ty::InstanceDef::Item(def) => {
if ecx.tcx.is_ctfe_mir_available(def.did) {
Ok(ecx.tcx.mir_for_ctfe_opt_const_arg(def))
} else {
let path = ecx.tcx.def_path_str(def.did);
Err(ConstEvalErrKind::NeedsRfc(format!("calling extern function `{}`", path))
.into())
}
}
_ => Ok(ecx.tcx.instance_mir(instance)),
}
}
fn find_mir_or_eval_fn(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
_abi: Abi,
args: &[OpTy<'tcx>],
_ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
_unwind: StackPopUnwind, // unwinding is not supported in consts
) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
debug!("find_mir_or_eval_fn: {:?}", instance);
// Only check non-glue functions
if let ty::InstanceDef::Item(def) = instance.def {
// Execution might have wandered off into other crates, so we cannot do a stability-
// sensitive check here. But we can at least rule out functions that are not const
// at all.
if !ecx.tcx.is_const_fn_raw(def.did) {
// allow calling functions marked with #[default_method_body_is_const].
if !ecx.tcx.has_attr(def.did, sym::default_method_body_is_const) {
// Some functions we support even if they are non-const -- but avoid testing
// that for const fn!
if let Some(new_instance) = ecx.hook_panic_fn(instance, args)? {
// We call another const fn instead.
return Self::find_mir_or_eval_fn(
ecx,
new_instance,
_abi,
args,
_ret,
_unwind,
);
} else {
// We certainly do *not* want to actually call the fn
// though, so be sure we return here.
throw_unsup_format!("calling non-const function `{}`", instance)
}
}
}
}
// This is a const fn. Call it.
Ok(Some(ecx.load_mir(instance.def, None)?))
}
fn call_intrinsic(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
// Shared intrinsics.
if ecx.emulate_intrinsic(instance, args, ret)? {
return Ok(());
}
let intrinsic_name = ecx.tcx.item_name(instance.def_id());
// CTFE-specific intrinsics.
let (dest, ret) = match ret {
None => {
return Err(ConstEvalErrKind::NeedsRfc(format!(
"calling intrinsic `{}`",
intrinsic_name
))
.into());
}
Some(p) => p,
};
match intrinsic_name {
sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
let a = ecx.read_immediate(&args[0])?.to_scalar()?;
let b = ecx.read_immediate(&args[1])?.to_scalar()?;
let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
ecx.guaranteed_eq(a, b)
} else {
ecx.guaranteed_ne(a, b)
};
ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
}
sym::const_allocate => {
let size = ecx.read_scalar(&args[0])?.to_machine_usize(ecx)?;
let align = ecx.read_scalar(&args[1])?.to_machine_usize(ecx)?;
let align = match Align::from_bytes(align) {
Ok(a) => a,
Err(err) => throw_ub_format!("align has to be a power of 2, {}", err),
};
let ptr = ecx.memory.allocate(
Size::from_bytes(size as u64),
align,
interpret::MemoryKind::Machine(MemoryKind::Heap),
)?;
ecx.write_pointer(ptr, dest)?;
}
_ => {
return Err(ConstEvalErrKind::NeedsRfc(format!(
"calling intrinsic `{}`",
intrinsic_name
))
.into());
}
}
ecx.go_to_block(ret);
Ok(())
}
fn assert_panic(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
msg: &AssertMessage<'tcx>,
_unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
use rustc_middle::mir::AssertKind::*;
// Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
let eval_to_int =
|op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
let err = match msg {
BoundsCheck { ref len, ref index } => {
let len = eval_to_int(len)?;
let index = eval_to_int(index)?;
BoundsCheck { len, index }
}
Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
};
Err(ConstEvalErrKind::AssertFailure(err).into())
}
fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: String) -> InterpResult<'tcx, !> {
Err(ConstEvalErrKind::Abort(msg).into())
}
fn binary_ptr_op(
_ecx: &InterpCx<'mir, 'tcx, Self>,
_bin_op: mir::BinOp,
_left: &ImmTy<'tcx>,
_right: &ImmTy<'tcx>,
) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
}
fn box_alloc(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_dest: &PlaceTy<'tcx>,
) -> InterpResult<'tcx> {
Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
}
fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
// The step limit has already been hit in a previous call to `before_terminator`.
if ecx.machine.steps_remaining == 0 {
return Ok(());
}
ecx.machine.steps_remaining -= 1;
if ecx.machine.steps_remaining == 0 {
throw_exhaust!(StepLimitReached)
}
Ok(())
}
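// Editorial note (not part of the original commit): a worked example of the step
// accounting above, assuming `const_eval_limit = 2`:
//   call 1: steps_remaining 2 -> 1, Ok
//   call 2: steps_remaining 1 -> 0, throw_exhaust!(StepLimitReached)
// With the limit set to 0, the first check returns `Ok` every time (limit disabled).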
#[inline(always)]
fn init_frame_extra(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
frame: Frame<'mir, 'tcx>,
) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
// Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
throw_exhaust!(StackFrameLimitReached)
} else {
Ok(frame)
}
}
#[inline(always)]
fn stack(
ecx: &'a InterpCx<'mir, 'tcx, Self>,
) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
&ecx.machine.stack
}
#[inline(always)]
fn stack_mut(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
&mut ecx.machine.stack
}
fn before_access_global(
memory_extra: &MemoryExtra,
alloc_id: AllocId,
allocation: &Allocation,
static_def_id: Option<DefId>,
is_write: bool,
) -> InterpResult<'tcx> {
if is_write {
// Write access. These are never allowed, but we give a targeted error message.
if allocation.mutability == Mutability::Not {
Err(err_ub!(WriteToReadOnly(alloc_id)).into())
} else {
Err(ConstEvalErrKind::ModifiedGlobal.into())
}
} else {
// Read access. These are usually allowed, with some exceptions.
if memory_extra.can_access_statics {
// Machine configuration allows us read from anything (e.g., `static` initializer).
Ok(())
} else if static_def_id.is_some() {
// Machine configuration does not allow us to read statics
// (e.g., `const` initializer).
// See const_eval::machine::MemoryExtra::can_access_statics for why
// this check is so important: if we could read statics, we could read pointers
// to mutable allocations *inside* statics. These allocations are not themselves
// statics, so pointers to them can get around the check in `validity.rs`.
Err(ConstEvalErrKind::ConstAccessesStatic.into())
} else {
// Immutable global, this read is fine.
// But make sure we never accept a read from something mutable; that would be
// unsound. The content of such an allocation could differ between now and
// run-time, so permitting the read now might return the wrong value.
assert_eq!(allocation.mutability, Mutability::Not);
Ok(())
}
}
}
}
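// Editorial note (not part of the original commit): the access matrix enforced by
// `before_access_global` above.
//   access | can_access_statics | allocation          | result
//   -------+--------------------+---------------------+--------------------------------
//   write  | (any)              | read-only           | err_ub!(WriteToReadOnly)
//   write  | (any)              | mutable             | ConstEvalErrKind::ModifiedGlobal
//   read   | true               | anything            | Ok
//   read   | false              | part of a static    | ConstEvalErrKind::ConstAccessesStatic
//   read   | false              | other (immutable)   | Ok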
// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.


@@ -0,0 +1,207 @@
// Not in interpret to make sure we do not use private implementation details
use std::convert::TryFrom;
use rustc_hir::Mutability;
use rustc_middle::ty::{self, TyCtxt};
use rustc_middle::{
mir::{self, interpret::ConstAlloc},
ty::ScalarInt,
};
use rustc_span::{source_map::DUMMY_SP, symbol::Symbol};
use crate::interpret::{
intern_const_alloc_recursive, ConstValue, InternKind, InterpCx, MPlaceTy, MemPlaceMeta, Scalar,
};
mod error;
mod eval_queries;
mod fn_queries;
mod machine;
pub use error::*;
pub use eval_queries::*;
pub use fn_queries::*;
pub use machine::*;
pub(crate) fn const_caller_location(
tcx: TyCtxt<'tcx>,
(file, line, col): (Symbol, u32, u32),
) -> ConstValue<'tcx> {
trace!("const_caller_location: {}:{}:{}", file, line, col);
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);
let loc_place = ecx.alloc_caller_location(file, line, col);
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
bug!("intern_const_alloc_recursive should not error in this case")
}
ConstValue::Scalar(Scalar::from_pointer(loc_place.ptr.into_pointer_or_addr().unwrap(), &tcx))
}
/// Convert an evaluated constant to a type level constant
pub(crate) fn const_to_valtree<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
raw: ConstAlloc<'tcx>,
) -> Option<ty::ValTree<'tcx>> {
let ecx = mk_eval_cx(
tcx, DUMMY_SP, param_env,
// It is absolutely crucial for soundness that
// we do not read from static items or other mutable memory.
false,
);
let place = ecx.raw_const_to_mplace(raw).unwrap();
const_to_valtree_inner(&ecx, &place)
}
fn const_to_valtree_inner<'tcx>(
ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
place: &MPlaceTy<'tcx>,
) -> Option<ty::ValTree<'tcx>> {
let branches = |n, variant| {
let place = match variant {
Some(variant) => ecx.mplace_downcast(&place, variant).unwrap(),
None => *place,
};
let variant =
variant.map(|variant| Some(ty::ValTree::Leaf(ScalarInt::from(variant.as_u32()))));
let fields = (0..n).map(|i| {
let field = ecx.mplace_field(&place, i).unwrap();
const_to_valtree_inner(ecx, &field)
});
// For enums, we prepend their variant index before the variant's fields so we can figure out
// the variant again when just seeing a valtree.
let branches = variant.into_iter().chain(fields);
Some(ty::ValTree::Branch(
ecx.tcx.arena.alloc_from_iter(branches.collect::<Option<Vec<_>>>()?),
))
};
match place.layout.ty.kind() {
ty::FnDef(..) => Some(ty::ValTree::zst()),
ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
let val = ecx.read_immediate(&place.into()).unwrap();
let val = val.to_scalar().unwrap();
Some(ty::ValTree::Leaf(val.assert_int()))
}
// Raw pointers are not allowed in type level constants, as we cannot properly test them for
// equality at compile-time (see `ptr_guaranteed_eq`/`_ne`).
// Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to
// agree with runtime equality tests.
ty::FnPtr(_) | ty::RawPtr(_) => None,
ty::Ref(..) => unimplemented!("need to use deref_const"),
// Trait objects are not allowed in type level constants, as we have no concept for
// resolving their backing type, even if we can do that at const eval time. We may
// hypothetically be able to allow `dyn StructuralEq` trait objects in the future,
// but it is unclear if this is useful.
ty::Dynamic(..) => None,
ty::Slice(_) | ty::Str => {
unimplemented!("need to find the backing data of the slice/str and recurse on that")
}
ty::Tuple(substs) => branches(substs.len(), None),
ty::Array(_, len) => branches(usize::try_from(len.eval_usize(ecx.tcx.tcx, ecx.param_env)).unwrap(), None),
ty::Adt(def, _) => {
if def.variants.is_empty() {
bug!("uninhabited types should have errored and never gotten converted to valtree")
}
let variant = ecx.read_discriminant(&place.into()).unwrap().1;
branches(def.variants[variant].fields.len(), def.is_enum().then_some(variant))
}
ty::Never
| ty::Error(_)
| ty::Foreign(..)
| ty::Infer(ty::FreshIntTy(_))
| ty::Infer(ty::FreshFloatTy(_))
| ty::Projection(..)
| ty::Param(_)
| ty::Bound(..)
| ty::Placeholder(..)
// FIXME(oli-obk): we could look behind opaque types
| ty::Opaque(..)
| ty::Infer(_)
// FIXME(oli-obk): we can probably encode closures just like structs
| ty::Closure(..)
| ty::Generator(..)
| ty::GeneratorWitness(..) => None,
}
}
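// Editorial sketch (not part of the original commit): the valtree produced by the
// encoding above for a hypothetical `enum E { A(u8), B }` value `E::A(7)`:
//   Branch([Leaf(0 /* variant index of A */), Leaf(7)])
// For tuples and non-enum ADTs the variant leaf is omitted and only the fields'
// subtrees are stored.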
/// This function uses `unwrap` copiously, because an already validated constant
/// must have valid fields and can thus never fail outside of compiler bugs. However, it is
/// invoked from the pretty printer, where it can receive enums with no variants and e.g.
/// `read_discriminant` needs to be able to handle that.
pub(crate) fn destructure_const<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
val: &'tcx ty::Const<'tcx>,
) -> mir::DestructuredConst<'tcx> {
trace!("destructure_const: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let op = ecx.const_to_op(val, None).unwrap();
// We go to `usize` as we cannot allocate anything bigger anyway.
let (field_count, variant, down) = match val.ty.kind() {
ty::Array(_, len) => (usize::try_from(len.eval_usize(tcx, param_env)).unwrap(), None, op),
ty::Adt(def, _) if def.variants.is_empty() => {
return mir::DestructuredConst { variant: None, fields: &[] };
}
ty::Adt(def, _) => {
let variant = ecx.read_discriminant(&op).unwrap().1;
let down = ecx.operand_downcast(&op, variant).unwrap();
(def.variants[variant].fields.len(), Some(variant), down)
}
ty::Tuple(substs) => (substs.len(), None, op),
_ => bug!("cannot destructure constant {:?}", val),
};
let fields_iter = (0..field_count).map(|i| {
let field_op = ecx.operand_field(&down, i).unwrap();
let val = op_to_const(&ecx, &field_op);
ty::Const::from_value(tcx, val, field_op.layout.ty)
});
let fields = tcx.arena.alloc_from_iter(fields_iter);
mir::DestructuredConst { variant, fields }
}
pub(crate) fn deref_const<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
val: &'tcx ty::Const<'tcx>,
) -> &'tcx ty::Const<'tcx> {
trace!("deref_const: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let op = ecx.const_to_op(val, None).unwrap();
let mplace = ecx.deref_operand(&op).unwrap();
if let Some(alloc_id) = mplace.ptr.provenance {
assert_eq!(
tcx.get_global_alloc(alloc_id).unwrap().unwrap_memory().mutability,
Mutability::Not,
"deref_const cannot be used with mutable allocations as \
that could allow pattern matching to observe mutable statics",
);
}
let ty = match mplace.meta {
MemPlaceMeta::None => mplace.layout.ty,
MemPlaceMeta::Poison => bug!("poison metadata in `deref_const`: {:#?}", mplace),
// In case of unsized types, figure out the real type behind.
MemPlaceMeta::Meta(scalar) => match mplace.layout.ty.kind() {
ty::Str => bug!("there's no sized equivalent of a `str`"),
ty::Slice(elem_ty) => tcx.mk_array(elem_ty, scalar.to_machine_usize(&tcx).unwrap()),
_ => bug!(
"type {} should not have metadata, but had {:?}",
mplace.layout.ty,
mplace.meta
),
},
};
tcx.mk_const(ty::Const { val: ty::ConstKind::Value(op_to_const(&ecx, &mplace.into())), ty })
}