use std::fmt;

use itertools::Either;
use rustc_abi as abi;
use rustc_abi::{
    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::{bug, span_bug};
use rustc_session::config::OptLevel;
use tracing::{debug, instrument};

use super::place::{PlaceRef, PlaceValue};
use super::rvalue::transmute_scalar;
use super::{FunctionCx, LocalRef};
use crate::MemFlags;
use crate::common::IntPredicate;
use crate::traits::*;

/// The representation of a Rust value. The enum variant is in fact
/// uniquely determined by the value's type, but is kept as a
/// safety check.
#[derive(Copy, Clone, Debug)]
pub enum OperandValue<V> {
    /// A reference to the actual operand. The data is guaranteed
    /// to be valid for the operand's lifetime.
    /// The second value, if any, is the extra data (vtable or length)
    /// which indicates that it refers to an unsized rvalue.
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// [`LayoutTypeCodegenMethods::is_backend_ref`] returns `true`.
    /// (That basically amounts to "isn't one of the other variants".)
    ///
    /// This holds a [`PlaceValue`] (like a [`PlaceRef`] does) with a pointer
    /// to the location holding the value. The type behind that pointer is the
    /// one returned by [`LayoutTypeCodegenMethods::backend_type`].
    Ref(PlaceValue<V>),
    /// A single LLVM immediate value.
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// [`LayoutTypeCodegenMethods::is_backend_immediate`] returns `true`.
    /// The backend value in this variant must be the *immediate* backend type,
    /// as returned by [`LayoutTypeCodegenMethods::immediate_backend_type`].
    Immediate(V),
    /// A pair of immediate LLVM values. Used by wide pointers too.
    ///
    /// # Invariants
    /// - For `Pair(a, b)`, `a` is always at offset 0, though it may have a
    ///   `FieldIdx` of 1 or more if it is preceded by 1-ZST fields.
    /// - `b` is never at offset 0, because a `V` is never a 1-ZST.
    /// - `a` and `b` always have different `FieldIdx`s, but `b`'s index may be
    ///   lower than `a`'s (layouts may reorder fields), and the two indices need
    ///   not be adjacent: any number of 1-ZST fields can sit between them without
    ///   affecting the shape of the data, which is what determines whether `Pair`
    ///   is used.
    /// - An `OperandValue` *must* be this variant for any type for which
    ///   [`LayoutTypeCodegenMethods::is_backend_scalar_pair`] returns `true`.
    /// - The backend values in this variant must be the *immediate* backend types,
    ///   as returned by [`LayoutTypeCodegenMethods::scalar_pair_element_backend_type`]
    ///   with `immediate: true`.
    Pair(V, V),
    /// A value taking no bytes, and which therefore needs no LLVM value at all.
    ///
    /// If you ever need a `V` to pass to something, get a fresh poison value
    /// from [`ConstCodegenMethods::const_poison`].
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// `is_zst` on its `Layout` returns `true`. Note however that
    /// these values can still require alignment.
    ZeroSized,
}
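
// As a rough intuition (illustrative only; the authoritative answer for any
// type is its layout's `BackendRepr`, via the `is_backend_*` queries mentioned
// above), operands of these Rust types typically use these variants:
//
//     u32, f64, *const T     -> Immediate
//     &[u8], &dyn Trait      -> Pair (data pointer plus length or vtable)
//     (), PhantomData<T>     -> ZeroSized
//     String, [u64; 32]      -> Ref (layouts with `BackendRepr::Memory`)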

impl<V: CodegenObject> OperandValue<V> {
    /// Treat this value as a pointer and return the data pointer and
    /// optional metadata as backend values.
    ///
    /// If you're making a place, use [`Self::deref`] instead.
    pub(crate) fn pointer_parts(self) -> (V, Option<V>) {
        match self {
            OperandValue::Immediate(llptr) => (llptr, None),
            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
            _ => bug!("OperandValue cannot be a pointer: {self:?}"),
        }
    }

    /// Treat this value as a pointer and return the place to which it points.
    ///
    /// The pointer immediate doesn't inherently know its alignment,
    /// so you need to pass it in. If you want to get it from a type's ABI
    /// alignment, then maybe you want [`OperandRef::deref`] instead.
    ///
    /// This is the inverse of [`PlaceValue::address`].
    pub(crate) fn deref(self, align: Align) -> PlaceValue<V> {
        let (llval, llextra) = self.pointer_parts();
        PlaceValue { llval, llextra, align }
    }
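
    // How these fit together, as a minimal sketch (illustrative; `op` and
    // `align` are hypothetical locals). For a wide-pointer operand such as one
    // of type `&[u8]`:
    //
    //     let (data, meta) = op.pointer_parts(); // `meta` is `Some(len)` here
    //     let place = op.deref(align);           // same parts, bundled with `align`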

    pub(crate) fn is_expected_variant_for_type<'tcx, Cx: LayoutTypeCodegenMethods<'tcx>>(
        &self,
        cx: &Cx,
        ty: TyAndLayout<'tcx>,
    ) -> bool {
        match self {
            OperandValue::ZeroSized => ty.is_zst(),
            OperandValue::Immediate(_) => cx.is_backend_immediate(ty),
            OperandValue::Pair(_, _) => cx.is_backend_scalar_pair(ty),
            OperandValue::Ref(_) => cx.is_backend_ref(ty),
        }
    }
}

/// An `OperandRef` is an "SSA" reference to a Rust value, along with
/// its type.
///
/// NOTE: unless you know a value's type exactly, you should not
/// generate LLVM opcodes acting on it directly; instead act via methods,
/// to avoid nasty edge cases. In particular, using `Builder::store`
/// directly is sure to cause problems -- use `OperandRef::store`
/// instead.
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
    /// The value.
    pub val: OperandValue<V>,

    /// The layout of the value, based on its Rust type.
    pub layout: TyAndLayout<'tcx>,
}

impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
        assert!(layout.is_zst());
        OperandRef { val: OperandValue::ZeroSized, layout }
    }

    pub(crate) fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        val: mir::ConstValue<'tcx>,
        ty: Ty<'tcx>,
    ) -> Self {
        let layout = bx.layout_of(ty);

        let val = match val {
            ConstValue::Scalar(x) => {
                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                };
                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
                OperandValue::Immediate(llval)
            }
            ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
            ConstValue::Slice { data, meta } => {
                let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {
                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                };
                let a = Scalar::from_pointer(
                    Pointer::new(bx.tcx().reserve_and_set_memory_alloc(data).into(), Size::ZERO),
                    &bx.tcx(),
                );
                let a_llval = bx.scalar_to_backend(
                    a,
                    a_scalar,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_llval = bx.const_usize(meta);
                OperandValue::Pair(a_llval, b_llval)
            }
            ConstValue::Indirect { alloc_id, offset } => {
                let alloc = bx.tcx().global_alloc(alloc_id).unwrap_memory();
                return Self::from_const_alloc(bx, layout, alloc, offset);
            }
        };

        OperandRef { val, layout }
    }

    fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
        alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
        offset: Size,
    ) -> Self {
        let alloc_align = alloc.inner().align;
        assert!(alloc_align >= layout.align.abi, "{alloc_align:?} < {:?}", layout.align.abi);

        let read_scalar = |start, size, s: abi::Scalar, ty| {
            match alloc.0.read_scalar(
                bx,
                alloc_range(start, size),
                /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
            ) {
                Ok(val) => bx.scalar_to_backend(val, s, ty),
                Err(_) => bx.const_poison(ty),
            }
        };

        // It may seem like all types with `Scalar` or `ScalarPair` ABI are fair game at this point.
        // However, `MaybeUninit<u64>` is considered a `Scalar` as far as its layout is concerned --
        // and yet cannot be represented by an interpreter `Scalar`, since we have to handle the
        // case where some of the bytes are initialized and others are not. So, we need an extra
        // check that walks over the type of `mplace` to make sure it is truly correct to treat this
        // like a `Scalar` (or `ScalarPair`).
        match layout.backend_repr {
            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                let size = s.size(bx);
                assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
                OperandRef { val: OperandValue::Immediate(val), layout }
            }
            BackendRepr::ScalarPair(
                a @ abi::Scalar::Initialized { .. },
                b @ abi::Scalar::Initialized { .. },
            ) => {
                let (a_size, b_size) = (a.size(bx), b.size(bx));
                let b_offset = (offset + a_size).align_to(b.align(bx).abi);
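                // Worked example (illustrative): reading a `(u8, u32)` pair at
                // `offset` 0 gives `a_size` = 1 byte, and rounding 1 up to `b`'s
                // 4-byte alignment puts `b_offset` at byte 4.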
                assert!(b_offset.bytes() > 0);
                let a_val = read_scalar(
                    offset,
                    a_size,
                    a,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_val = read_scalar(
                    b_offset,
                    b_size,
                    b,
                    bx.scalar_pair_element_backend_type(layout, 1, true),
                );
                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
            }
            _ if layout.is_zst() => OperandRef::zero_sized(layout),
            _ => {
                // Neither a scalar nor scalar pair. Load from a place.
                // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
                // same `ConstAllocation`?
                let init = bx.const_data_from_alloc(alloc);
                let base_addr = bx.static_addr_of(init, alloc_align, None);

                let llval = bx.const_ptr_byte_offset(base_addr, offset);
                bx.load_operand(PlaceRef::new_sized(llval, layout))
            }
        }
    }

    /// Asserts that this operand refers to a scalar and returns
    /// its value.
    pub fn immediate(self) -> V {
        match self.val {
            OperandValue::Immediate(s) => s,
            _ => bug!("not immediate: {:?}", self),
        }
    }

    /// Asserts that this operand is a pointer (or reference) and returns
    /// the place to which it points. (This requires no code to be emitted
    /// as we represent places using the pointer to the place.)
    ///
    /// This uses [`Ty::builtin_deref`] to include the type of the place and
    /// assumes the place is aligned to the pointee's usual ABI alignment.
    ///
    /// If you don't need the type, see [`OperandValue::pointer_parts`]
    /// or [`OperandValue::deref`].
    pub fn deref<Cx: CodegenMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
        if self.layout.ty.is_box() {
            // Derefer should have removed all Box derefs
            bug!("dereferencing {:?} in codegen", self.layout.ty);
        }

        let projected_ty = self
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self));

        let layout = cx.layout_of(projected_ty);
        self.val.deref(layout.align.abi).with_type(layout)
    }

    /// If this operand is a `Pair`, we return an aggregate with the two values.
    /// For other cases, see `immediate`.
    pub fn immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
    ) -> V {
        if let OperandValue::Pair(a, b) = self.val {
            let llty = bx.cx().immediate_backend_type(self.layout);
            debug!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty);
            // Reconstruct the immediate aggregate.
            let mut llpair = bx.cx().const_poison(llty);
            llpair = bx.insert_value(llpair, a, 0);
            llpair = bx.insert_value(llpair, b, 1);
            llpair
        } else {
            self.immediate()
        }
    }

    /// If the type is a pair, we return a `Pair`, otherwise, an `Immediate`.
    pub fn from_immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        llval: V,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let val = if let BackendRepr::ScalarPair(..) = layout.backend_repr {
            debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);

            // Deconstruct the immediate aggregate.
            let a_llval = bx.extract_value(llval, 0);
            let b_llval = bx.extract_value(llval, 1);
            OperandValue::Pair(a_llval, b_llval)
        } else {
            OperandValue::Immediate(llval)
        };
        OperandRef { val, layout }
    }
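
    // The two functions above are inverses for `ScalarPair` types. A minimal
    // sketch (illustrative; `bx` and `op` are assumed to be in scope):
    //
    //     let packed = op.immediate_or_packed_pair(bx);
    //     let back = OperandRef::from_immediate_or_packed_pair(bx, packed, op.layout);
    //     // `back.val` is `OperandValue::Pair(a, b)` again.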

    pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        i: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), i);
        let offset = self.layout.fields.offset(i);

        if !bx.is_backend_ref(self.layout) && bx.is_backend_ref(field) {
            // Part of https://github.com/rust-lang/compiler-team/issues/838
            span_bug!(
                fx.mir.span,
                "Non-ref type {self:?} cannot project to ref field type {field:?}",
            );
        }

        let val = if field.is_zst() {
            OperandValue::ZeroSized
        } else if let BackendRepr::SimdVector { .. } = self.layout.backend_repr {
            // codegen_transmute_operand doesn't support SIMD, but since the previous
            // check handled ZSTs, the only possible field access into something SIMD
            // is to the `non_1zst_field` that's the same SIMD. (Other things, even
            // just padding, would change the wrapper's representation type.)
            assert_eq!(field.size, self.layout.size);
            self.val
        } else if field.size == self.layout.size {
            assert_eq!(offset.bytes(), 0);
            fx.codegen_transmute_operand(bx, *self, field)
        } else {
            let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
                // Extract a scalar component from a pair.
                (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
                    if offset.bytes() == 0 {
                        assert_eq!(field.size, a.size(bx.cx()));
                        (Some(a), a_llval)
                    } else {
                        assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
                        assert_eq!(field.size, b.size(bx.cx()));
                        (Some(b), b_llval)
                    }
                }

                _ => {
                    span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
                }
            };
            OperandValue::Immediate(match field.backend_repr {
                BackendRepr::SimdVector { .. } => imm,
                BackendRepr::Scalar(out_scalar) => {
                    let Some(in_scalar) = in_scalar else {
                        span_bug!(
                            fx.mir.span,
                            "OperandRef::extract_field({:?}): missing input scalar for output scalar",
                            self
                        )
                    };
                    if in_scalar != out_scalar {
                        // If the backend and backend_immediate types might differ,
                        // flip back to the backend type then to the new immediate.
                        // This avoids nop truncations, but still handles things like
                        // bools in union fields that need to be truncated.
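                        // E.g. (illustrative): a `bool` stored as `i8` in a union
                        // field arrives here as an `i8` immediate; the round-trip
                        // below re-truncates it to the `i1` immediate form.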
                        let backend = bx.from_immediate(imm);
                        bx.to_immediate_scalar(backend, out_scalar)
                    } else {
                        imm
                    }
                }
                BackendRepr::ScalarPair(_, _) | BackendRepr::Memory { .. } => bug!(),
            })
        };

        OperandRef { val, layout: field }
    }

    /// Obtain the actual discriminant of a value.
    #[instrument(level = "trace", skip(fx, bx))]
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let dl = &bx.tcx().data_layout;
        let cast_to_layout = bx.cx().layout_of(cast_to);
        let cast_to = bx.cx().immediate_backend_type(cast_to_layout);

        // We check uninhabitedness separately because a type like
        // `enum Foo { Bar(i32, !) }` is still reported as `Variants::Single`,
        // *not* as `Variants::Empty`.
        if self.layout.is_uninhabited() {
            return bx.cx().const_poison(cast_to);
        }

        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => {
                let discr_val =
                    if let Some(discr) = self.layout.ty.discriminant_for_variant(bx.tcx(), index) {
                        discr.val
                    } else {
                        // This arm is for types which are neither enums nor coroutines,
                        // and thus for which the only possible "variant" should be the first one.
                        assert_eq!(index, FIRST_VARIANT);
                        // There's thus no actual discriminant to return, so we return
                        // what it would have been if this was a single-variant enum.
                        0
                    };
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        // Read the tag/niche-encoded discriminant from memory.
        let tag_op = match self.val {
            OperandValue::ZeroSized => bug!(),
            OperandValue::Immediate(_) | OperandValue::Pair(_, _) => {
                self.extract_field(fx, bx, tag_field.as_usize())
            }
            OperandValue::Ref(place) => {
                let tag = place.with_type(self.layout).project_field(bx, tag_field.as_usize());
                bx.load_operand(tag)
            }
        };
        let tag_imm = tag_op.immediate();

        // Decode the discriminant (specifically if it's niche-encoded).
        match *tag_encoding {
            TagEncoding::Direct => {
                let signed = match tag_scalar.primitive() {
                    // We use `i1` for bytes that are always `0` or `1`,
                    // e.g., `#[repr(i8)] enum E { A, B }`, but we can't
                    // let LLVM interpret the `i1` as signed, because
                    // then `i1 1` (i.e., `E::B`) is effectively `i8 -1`.
                    Primitive::Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag_imm, cast_to, signed)
            }
            TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
                // Cast to an integer so we don't have to treat a pointer as a
                // special case.
                let (tag, tag_llty) = match tag_scalar.primitive() {
                    // FIXME(erikdesjardins): handle non-default addrspace ptr sizes
                    Primitive::Pointer(_) => {
                        let t = bx.type_from_integer(dl.ptr_sized_integer());
                        let tag = bx.ptrtoint(tag_imm, t);
                        (tag, t)
                    }
                    _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
                };

                // `layout_sanity_check` ensures that we only get here for cases where the discriminant
                // value and the variant index match, since that's all `Niche` can encode.

                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();
                let niche_start_const = bx.cx().const_uint_big(tag_llty, niche_start);

                // We have a subrange `niche_start..=niche_end` inside `range`.
                // If the value of the tag is inside this subrange, it's a
                // "niche value", an increment of the discriminant. Otherwise it
                // indicates the untagged variant.
                // A general algorithm to extract the discriminant from the tag
                // is:
                // relative_tag = tag - niche_start
                // is_niche = relative_tag <= (ule) relative_max
                // discr = if is_niche {
                //     cast(relative_tag) + niche_variants.start()
                // } else {
                //     untagged_variant
                // }
                // However, we will likely be able to emit simpler code.
                let (is_niche, tagged_discr, delta) = if relative_max == 0 {
                    // Best case scenario: only one tagged variant. This will
                    // likely become just a comparison and a jump.
                    // The algorithm is:
                    // is_niche = tag == niche_start
                    // discr = if is_niche {
                    //     niche_start
                    // } else {
                    //     untagged_variant
                    // }
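                    // E.g. (illustrative): for `Option<&T>` the niche is the null
                    // pointer, so `niche_start` is 0 and this becomes a plain
                    // `tag == 0` test for `None`.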
                    let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start_const);
                    let tagged_discr =
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
                    (is_niche, tagged_discr, 0)
                } else {
                    // With multiple niched variants we'll have to actually compute
                    // the variant index from the stored tag.
                    //
                    // However, there's still one small optimization we can often do for
                    // determining *whether* a tag value is a natural value or a niched
                    // variant. The general algorithm involves a subtraction that often
                    // wraps in practice, making it tricky to analyse. However, in cases
                    // where there are few enough possible values of the tag that it doesn't
                    // need to wrap around, we can instead just look for the contiguous
                    // tag values on the end of the range with a single comparison.
                    //
                    // For example, take the type `enum Demo { A, B, Untagged(bool) }`.
                    // The `bool` is {0, 1}, and the two other variants are given the
                    // tags {2, 3} respectively. That means the `tag_range` is
                    // `[0, 3]`, which doesn't wrap as unsigned (nor as signed), so
                    // we can test for the niched variants with just `>= 2`.
                    //
                    // That means we're looking either for the niche values *above*
                    // the natural values of the untagged variant:
                    //
                    //          niche_start                 niche_end
                    //              |                           |
                    //              v                           v
                    // MIN ---------+---------------------------+---------- MAX
                    //      ^       |         is niche          |
                    //      |       +---------------------------+
                    //      |                                   |
                    // tag_range.start                    tag_range.end
                    //
                    // Or *below* the natural values:
                    //
                    //     niche_start             niche_end
                    //         |                       |
                    //         v                       v
                    // MIN ----+-----------------------+---------------------- MAX
                    //         |        is niche       |           ^
                    //         +-----------------------+           |
                    //         |                                   |
                    // tag_range.start                      tag_range.end
                    //
                    // With those two options and having the flexibility to choose
                    // between a signed or unsigned comparison on the tag, that
                    // covers most realistic scenarios. The tests have a (contrived)
                    // example of a 1-byte enum with over 128 niched variants which
                    // wraps both as signed and as unsigned, though, and for something
                    // like that we're stuck with the general algorithm.

                    let tag_range = tag_scalar.valid_range(&dl);
                    let tag_size = tag_scalar.size(&dl);
                    let niche_end = u128::from(relative_max).wrapping_add(niche_start);
                    let niche_end = tag_size.truncate(niche_end);

                    let relative_discr = bx.sub(tag, niche_start_const);
                    let cast_tag = bx.intcast(relative_discr, cast_to, false);
                    let is_niche = if tag_range.no_unsigned_wraparound(tag_size) == Ok(true) {
                        if niche_start == tag_range.start {
                            let niche_end_const = bx.cx().const_uint_big(tag_llty, niche_end);
                            bx.icmp(IntPredicate::IntULE, tag, niche_end_const)
                        } else {
                            assert_eq!(niche_end, tag_range.end);
                            bx.icmp(IntPredicate::IntUGE, tag, niche_start_const)
                        }
                    } else if tag_range.no_signed_wraparound(tag_size) == Ok(true) {
                        if niche_start == tag_range.start {
                            let niche_end_const = bx.cx().const_uint_big(tag_llty, niche_end);
                            bx.icmp(IntPredicate::IntSLE, tag, niche_end_const)
                        } else {
                            assert_eq!(niche_end, tag_range.end);
                            bx.icmp(IntPredicate::IntSGE, tag, niche_start_const)
                        }
                    } else {
                        bx.icmp(
                            IntPredicate::IntULE,
                            relative_discr,
                            bx.cx().const_uint(tag_llty, relative_max as u64),
                        )
                    };

                    (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
                };

                let tagged_discr = if delta == 0 {
                    tagged_discr
                } else {
                    bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
                };

                let untagged_variant_const =
                    bx.cx().const_uint(cast_to, u64::from(untagged_variant.as_u32()));

                // Thanks to parameter attributes and load metadata, LLVM already knows
                // the general valid range of the tag. It's possible, though, for there
                // to be an impossible value *in the middle*, which those ranges don't
                // communicate, so it's worth an `assume` to let the optimizer know.
                // Most importantly, this means when optimizing a variant test like
                // `SELECT(is_niche, complex, CONST) == CONST` it's ok to simplify that
                // to `!is_niche` because the `complex` part can't possibly match.
                if niche_variants.contains(&untagged_variant)
                    && bx.cx().sess().opts.optimize != OptLevel::No
                {
                    let ne = bx.icmp(IntPredicate::IntNE, tagged_discr, untagged_variant_const);
                    bx.assume(ne);
                }

                let discr = bx.select(is_niche, tagged_discr, untagged_variant_const);

                // In principle we could insert assumes on the possible range of `discr`, but
                // currently in LLVM this isn't worth it because the original `tag` will
                // have either a `range` parameter attribute or `!range` metadata,
                // or come from a `transmute` that already `assume`d it.

                discr
            }
        }
    }
}

/// Each of these variants starts out as `Either::Right` when it's uninitialized,
/// then setting the field changes that to `Either::Left` with the backend value.
#[derive(Debug, Copy, Clone)]
enum OperandValueBuilder<V> {
    ZeroSized,
    Immediate(Either<V, abi::Scalar>),
    Pair(Either<V, abi::Scalar>, Either<V, abi::Scalar>),
    /// `repr(simd)` types need special handling because they each have a non-empty
    /// array field (which uses [`OperandValue::Ref`]) despite the SIMD type itself
    /// using [`OperandValue::Immediate`], which for any other kind of type would
    /// mean that its one non-ZST field would also be [`OperandValue::Immediate`].
    Vector(Either<V, ()>),
}

/// Allows building up an `OperandRef` by setting fields one at a time.
#[derive(Debug, Copy, Clone)]
pub(super) struct OperandRefBuilder<'tcx, V> {
    val: OperandValueBuilder<V>,
    layout: TyAndLayout<'tcx>,
}
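
// Intended flow, as a sketch (illustrative; `bx`, `layout`, and the field
// operands are assumed to exist): create an uninitialized builder, set each
// field, then convert back into an ordinary `OperandRef`.
//
//     let mut builder = OperandRefBuilder::new(layout);
//     builder.insert_field(bx, FIRST_VARIANT, FieldIdx::ZERO, some_operand);
//     let op = builder.build(bx.cx());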

impl<'a, 'tcx, V: CodegenObject> OperandRefBuilder<'tcx, V> {
    /// Creates an uninitialized builder for an instance of the `layout`.
    ///
    /// ICEs for [`BackendRepr::Memory`] types (other than ZSTs), which should
    /// be built up inside a [`PlaceRef`] instead as they need an allocated place
    /// into which to write the values of the fields.
    pub(super) fn new(layout: TyAndLayout<'tcx>) -> Self {
        let val = match layout.backend_repr {
            BackendRepr::Memory { .. } if layout.is_zst() => OperandValueBuilder::ZeroSized,
            BackendRepr::Scalar(s) => OperandValueBuilder::Immediate(Either::Right(s)),
            BackendRepr::ScalarPair(a, b) => {
                OperandValueBuilder::Pair(Either::Right(a), Either::Right(b))
            }
            BackendRepr::SimdVector { .. } => OperandValueBuilder::Vector(Either::Right(())),
            BackendRepr::Memory { .. } => {
                bug!("Cannot use non-ZST Memory-ABI type in operand builder: {layout:?}");
            }
        };
        OperandRefBuilder { val, layout }
    }

    pub(super) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &mut self,
        bx: &mut Bx,
        variant: VariantIdx,
        field: FieldIdx,
        field_operand: OperandRef<'tcx, V>,
    ) {
        if let OperandValue::ZeroSized = field_operand.val {
            // A ZST never adds any state, so just ignore it.
            // This special-casing is worth it because of things like
            // `Result<!, !>` where `Ok(never)` is legal to write,
            // but the type shows as `FieldsShape::Primitive` so we can't
            // actually look at the layout for the field being set.
            return;
        }

        let is_zero_offset = if let abi::FieldsShape::Primitive = self.layout.fields {
            // The other branch looking at field layouts ICEs for primitives,
            // so we need to handle them separately.
            // Because we handled ZSTs above (like the metadata in a thin pointer),
            // the only possibility is that we're setting the one-and-only field.
            assert!(!self.layout.is_zst());
            assert_eq!(variant, FIRST_VARIANT);
            assert_eq!(field, FieldIdx::ZERO);
            true
        } else {
            let variant_layout = self.layout.for_variant(bx.cx(), variant);
            let field_offset = variant_layout.fields.offset(field.as_usize());
            field_offset == Size::ZERO
        };

        let mut update = |tgt: &mut Either<V, abi::Scalar>, src, from_scalar| {
            let to_scalar = tgt.unwrap_right();
            // We transmute here (rather than just `from_immediate`) because in
            // `Result<usize, *const ()>` the field of the `Ok` is an integer,
            // but the corresponding scalar in the enum is a pointer.
            let imm = transmute_scalar(bx, src, from_scalar, to_scalar);
            *tgt = Either::Left(imm);
        };

        match (field_operand.val, field_operand.layout.backend_repr) {
            (OperandValue::ZeroSized, _) => unreachable!("Handled above"),
            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
                OperandValueBuilder::Immediate(val @ Either::Right(_)) if is_zero_offset => {
                    update(val, v, from_scalar);
                }
                OperandValueBuilder::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
                    update(fst, v, from_scalar);
                }
                OperandValueBuilder::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
                    update(snd, v, from_scalar);
                }
                _ => {
                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
                }
            },
            (OperandValue::Immediate(v), BackendRepr::SimdVector { .. }) => match &mut self.val {
                OperandValueBuilder::Vector(val @ Either::Right(())) if is_zero_offset => {
                    *val = Either::Left(v);
                }
                _ => {
                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
                }
            },
            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
                match &mut self.val {
                    OperandValueBuilder::Pair(fst @ Either::Right(_), snd @ Either::Right(_)) => {
                        update(fst, a, from_sa);
                        update(snd, b, from_sb);
                    }
                    _ => bug!(
                        "Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}"
                    ),
                }
            }
            (OperandValue::Ref(place), BackendRepr::Memory { .. }) => match &mut self.val {
                OperandValueBuilder::Vector(val @ Either::Right(())) => {
                    let ibty = bx.cx().immediate_backend_type(self.layout);
                    let simd = bx.load_from_place(ibty, place);
                    *val = Either::Left(simd);
                }
                _ => {
                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
                }
            },
            _ => bug!("Operand cannot be used with `insert_field`: {field_operand:?}"),
        }
    }

    /// Insert the immediate value `imm` for field `f` in the *type itself*,
    /// rather than into one of the variants.
    ///
    /// Most things want [`Self::insert_field`] instead, but this one is
    /// necessary for writing things like enum tags that aren't in any variant.
    pub(super) fn insert_imm(&mut self, f: FieldIdx, imm: V) {
        let field_offset = self.layout.fields.offset(f.as_usize());
        let is_zero_offset = field_offset == Size::ZERO;
        match &mut self.val {
            OperandValueBuilder::Immediate(val @ Either::Right(_)) if is_zero_offset => {
                *val = Either::Left(imm);
            }
            OperandValueBuilder::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
                *fst = Either::Left(imm);
            }
            OperandValueBuilder::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
                *snd = Either::Left(imm);
            }
            _ => bug!("Tried to insert {imm:?} into field {f:?} of {self:?}"),
        }
    }

    /// After having set all necessary fields, this converts the builder back
    /// to the normal `OperandRef`.
    ///
    /// ICEs if any required fields were not set.
    pub(super) fn build(&self, cx: &impl CodegenMethods<'tcx, Value = V>) -> OperandRef<'tcx, V> {
        let OperandRefBuilder { val, layout } = *self;

        // For something like `Option::<u32>::None`, it's expected that the
        // payload scalar will not actually have been set, so this converts
        // unset scalars to corresponding `undef` values so long as the scalar
        // from the layout allows uninit.
        let unwrap = |r: Either<V, abi::Scalar>| match r {
            Either::Left(v) => v,
            Either::Right(s) if s.is_uninit_valid() => {
                let bty = cx.type_from_scalar(s);
                cx.const_undef(bty)
            }
            Either::Right(_) => bug!("OperandRef::build called while fields are missing {self:?}"),
        };

        let val = match val {
            OperandValueBuilder::ZeroSized => OperandValue::ZeroSized,
            OperandValueBuilder::Immediate(v) => OperandValue::Immediate(unwrap(v)),
            OperandValueBuilder::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)),
            OperandValueBuilder::Vector(v) => match v {
                Either::Left(v) => OperandValue::Immediate(v),
                Either::Right(())
                    if let BackendRepr::SimdVector { element, .. } = layout.backend_repr
                        && element.is_uninit_valid() =>
                {
                    let bty = cx.immediate_backend_type(layout);
                    OperandValue::Immediate(cx.const_undef(bty))
                }
                Either::Right(()) => {
                    bug!("OperandRef::build called while fields are missing {self:?}")
                }
            },
        };
        OperandRef { val, layout }
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
    /// Returns an `OperandValue` that's generally UB to use in any way.
    ///
    /// Depending on the `layout`, returns `ZeroSized` for ZSTs, an `Immediate` or
    /// `Pair` containing poison value(s), or a `Ref` containing a poison pointer.
    ///
    /// Supports sized types only.
    pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> OperandValue<V> {
        assert!(layout.is_sized());
        if layout.is_zst() {
            OperandValue::ZeroSized
        } else if bx.cx().is_backend_immediate(layout) {
            let ibty = bx.cx().immediate_backend_type(layout);
            OperandValue::Immediate(bx.const_poison(ibty))
        } else if bx.cx().is_backend_scalar_pair(layout) {
            let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
            let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
            OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
        } else {
            let ptr = bx.cx().type_ptr();
            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
        }
    }
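
    // (Illustrative note: callers typically reach for `poison` when a value is
    // structurally required but can never be observed, such as on codegen paths
    // known to be unreachable.)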

    pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::empty());
    }

    pub fn volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE);
    }

    pub fn unaligned_volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
    }

    pub fn nontemporal_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
    }

    pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
        flags: MemFlags,
    ) {
        debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
        match self {
            OperandValue::ZeroSized => {
                // Avoid generating stores of zero-sized values, because the only way to have a
                // zero-sized value is through `undef`/`poison`, and the store itself is useless.
            }
            OperandValue::Ref(val) => {
                assert!(dest.layout.is_sized(), "cannot directly store unsized values");
                if val.llextra.is_some() {
                    bug!("cannot directly store unsized values");
                }
                bx.typed_place_copy_with_flags(dest.val, val, dest.layout, flags);
            }
            OperandValue::Immediate(s) => {
                let val = bx.from_immediate(s);
                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
            }
            OperandValue::Pair(a, b) => {
                let BackendRepr::ScalarPair(a_scalar, b_scalar) = dest.layout.backend_repr else {
                    bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
                };
                let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);

                let val = bx.from_immediate(a);
                let align = dest.val.align;
                bx.store_with_flags(val, dest.val.llval, align, flags);

                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
                let val = bx.from_immediate(b);
                let align = dest.val.align.restrict_for_offset(b_offset);
                bx.store_with_flags(val, llptr, align, flags);
            }
        }
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    fn maybe_codegen_consume_direct(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> Option<OperandRef<'tcx, Bx::Value>> {
        debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);

        match self.locals[place_ref.local] {
            LocalRef::Operand(mut o) => {
                // Moves out of scalar and scalar pair fields are trivial.
                for elem in place_ref.projection.iter() {
                    match elem {
                        mir::ProjectionElem::Field(f, _) => {
                            assert!(
                                !o.layout.ty.is_any_ptr(),
                                "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
                                 but tried to access field {f:?} of pointer {o:?}",
                            );
                            o = o.extract_field(self, bx, f.index());
                        }
                        mir::ProjectionElem::Index(_)
                        | mir::ProjectionElem::ConstantIndex { .. } => {
                            // ZSTs don't require any actual memory access.
                            // FIXME(eddyb) deduplicate this with the identical
                            // checks in `codegen_consume` and `extract_field`.
                            let elem = o.layout.field(bx.cx(), 0);
                            if elem.is_zst() {
                                o = OperandRef::zero_sized(elem);
                            } else {
                                return None;
                            }
                        }
                        _ => return None,
                    }
                }

                Some(o)
            }
            LocalRef::PendingOperand => {
                bug!("use of {:?} before def", place_ref);
            }
            LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
                // watch out for locals that do not have an
                // alloca; they are handled somewhat differently
                None
            }
        }
    }

    pub fn codegen_consume(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_consume(place_ref={:?})", place_ref);

        let ty = self.monomorphized_place_ty(place_ref);
        let layout = bx.cx().layout_of(ty);

        // ZSTs don't require any actual memory access.
        if layout.is_zst() {
            return OperandRef::zero_sized(layout);
        }

        if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {
            return o;
        }

        // for most places, to consume them we just load them
        // out from their home
        let place = self.codegen_place(bx, place_ref);
        bx.load_operand(place)
    }

    pub fn codegen_operand(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_operand(operand={:?})", operand);

        match *operand {
            mir::Operand::Copy(ref place) | mir::Operand::Move(ref place) => {
                self.codegen_consume(bx, place.as_ref())
            }

            mir::Operand::Constant(ref constant) => {
                let constant_ty = self.monomorphize(constant.ty());
                // Most SIMD vector constants should be passed as immediates.
                // (In particular, some intrinsics really rely on this.)
                if constant_ty.is_simd() {
                    // However, some SIMD types do not actually use the vector ABI
                    // (in particular, packed SIMD types do not). Ensure we exclude those.
                    let layout = bx.layout_of(constant_ty);
                    if let BackendRepr::SimdVector { .. } = layout.backend_repr {
                        let (llval, ty) = self.immediate_const_vector(bx, constant);
                        return OperandRef {
                            val: OperandValue::Immediate(llval),
                            layout: bx.layout_of(ty),
                        };
                    }
                }
                self.eval_mir_constant_to_operand(bx, constant)
            }
        }
    }
}