use super::operand::OperandValue;
use super::{FunctionCx, LocalRef};

use crate::common::IntPredicate;
use crate::glue;
use crate::traits::*;
use crate::MemFlags;

use rustc_middle::mir;
use rustc_middle::mir::tcx::PlaceTy;
use rustc_middle::ty::layout::{HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_target::abi::{Abi, Align, FieldsShape, Int, TagEncoding};
use rustc_target::abi::{LayoutOf, VariantIdx, Variants};

#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
    /// A pointer to the contents of the place.
    pub llval: V,

    /// This place's extra data if it is unsized, or `None` if null.
    pub llextra: Option<V>,

    /// The monomorphized type of this place, including variant information.
    pub layout: TyAndLayout<'tcx>,

    /// The alignment we know for this place.
    pub align: Align,
}
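
// As an illustration (hypothetical values, not tied to any particular
// backend): for a sized `u32` local this would be
// `PlaceRef { llval: <stack slot>, llextra: None, layout: <u32 layout>, align: <4 bytes> }`,
// assuming a target where `u32` is 4-byte aligned.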

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
    pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        assert!(!layout.is_unsized());
        PlaceRef { llval, llextra: None, layout, align: layout.align.abi }
    }

    pub fn new_sized_aligned(
        llval: V,
        layout: TyAndLayout<'tcx>,
        align: Align,
    ) -> PlaceRef<'tcx, V> {
        assert!(!layout.is_unsized());
        PlaceRef { llval, llextra: None, layout, align }
    }

    // FIXME(eddyb) pass something else for the name so no work is done
    // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`).
    pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(!layout.is_unsized(), "tried to statically allocate unsized place");
        let tmp = bx.alloca(bx.cx().backend_type(layout), layout.align.abi);
        Self::new_sized(tmp, layout)
    }

    /// Returns a place for an indirect reference to an unsized place.
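    /// (E.g., for a local of type `str`, this allocates space for a
    /// `*mut str` fat pointer rather than for the string data itself.)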
    // FIXME(eddyb) pass something else for the name so no work is done
    // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`).
    pub fn alloca_unsized_indirect<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
        let ptr_ty = bx.cx().tcx().mk_mut_ptr(layout.ty);
        let ptr_layout = bx.cx().layout_of(ptr_ty);
        Self::alloca(bx, ptr_layout)
    }

    pub fn len<Cx: ConstMethods<'tcx, Value = V>>(&self, cx: &Cx) -> V {
        if let FieldsShape::Array { count, .. } = self.layout.fields {
            if self.layout.is_unsized() {
                assert_eq!(count, 0);
                self.llextra.unwrap()
            } else {
                cx.const_usize(count)
            }
        } else {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
    /// Access a field, at a point when the value's case is known.
    pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        ix: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), ix);
        let offset = self.layout.fields.offset(ix);
        let effective_field_align = self.align.restrict_for_offset(offset);

        let mut simple = || {
            let llval = match self.layout.abi {
                _ if offset.bytes() == 0 => {
                    // Unions and newtypes only use an offset of 0.
                    // Also handles the first field of Scalar, ScalarPair, and Vector layouts.
                    self.llval
                }
                Abi::ScalarPair(ref a, ref b)
                    if offset == a.value.size(bx.cx()).align_to(b.value.align(bx.cx()).abi) =>
                {
                    // Offset matches second field.
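                    // (E.g., in a `ScalarPair` layout of `(u8, u32)`, the second
                    // field sits at offset `size_of(u8)` rounded up to
                    // `align_of(u32)`, i.e. offset 4 on typical targets.)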
                    bx.struct_gep(self.llval, 1)
                }
                Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } if field.is_zst() => {
                    // ZST fields are not included in Scalar, ScalarPair, and Vector layouts,
                    // so manually offset the pointer.
                    let byte_ptr = bx.pointercast(self.llval, bx.cx().type_i8p());
                    bx.gep(byte_ptr, &[bx.const_usize(offset.bytes())])
                }
                Abi::Scalar(_) | Abi::ScalarPair(..) => {
                    // All fields of Scalar and ScalarPair layouts must have been handled
                    // by this point. Vector layouts have additional fields for each
                    // element of the vector, so don't panic in that case.
                    bug!(
                        "offset of non-ZST field `{:?}` does not match layout `{:#?}`",
                        field,
                        self.layout
                    );
                }
                _ => bx.struct_gep(self.llval, bx.cx().backend_field_index(self.layout, ix)),
            };
            PlaceRef {
                // HACK(eddyb): have to bitcast pointers until LLVM removes pointee types.
                llval: bx.pointercast(llval, bx.cx().type_ptr_to(bx.cx().backend_type(field))),
                llextra: if bx.cx().type_has_metadata(field.ty) { self.llextra } else { None },
                layout: field,
                align: effective_field_align,
            }
        };

        // Simple cases, which don't need DST adjustment:
        //   * no metadata available - just log the case
        //   * known alignment - sized types, `[T]`, `str` or a foreign type
        //   * packed struct - there is no alignment padding
        match field.ty.kind() {
            _ if self.llextra.is_none() => {
                debug!(
                    "unsized field `{}` of `{:?}` has no metadata for adjustment",
                    ix, self.llval
                );
                return simple();
            }
            _ if !field.is_unsized() => return simple(),
            ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(),
            ty::Adt(def, _) => {
                if def.repr.packed() {
                    // FIXME(eddyb) generalize the adjustment when we
                    // start supporting packing to larger alignments.
                    assert_eq!(self.layout.align.abi.bytes(), 1);
                    return simple();
                }
            }
            _ => {}
        }

        // We need to get the pointer manually now.
        // We do this by casting to a `*i8`, then offsetting it by the appropriate amount.
        // We do this instead of, say, simply adjusting the pointer from the result of a GEP
        // because the field may have an arbitrary alignment in the LLVM representation
        // anyway.
        //
        // To demonstrate:
        //
        //     struct Foo<T: ?Sized> {
        //         x: u16,
        //         y: T
        //     }
        //
        // The type `Foo<Foo<Trait>>` is represented in LLVM as `{ u16, { u16, u8 }}`,
        // meaning that the `y` field has 16-bit alignment.
        let meta = self.llextra;

        let unaligned_offset = bx.cx().const_usize(offset.bytes());

        // Get the alignment of the field.
        let (_, unsized_align) = glue::size_and_align_of_dst(bx, field.ty, meta);

        // Bump the unaligned offset up to the appropriate alignment using the
        // following expression:
        //
        //     (unaligned offset + (align - 1)) & -align
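        //
        // E.g., with an unaligned offset of 2 and a dynamic alignment of 8
        // (illustrative values), this computes `(2 + 7) & -8 == 8`.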

        // Calculate offset.
        let align_sub_1 = bx.sub(unsized_align, bx.cx().const_usize(1u64));
        let and_lhs = bx.add(unaligned_offset, align_sub_1);
        let and_rhs = bx.neg(unsized_align);
        let offset = bx.and(and_lhs, and_rhs);

        debug!("struct_field_ptr: DST field offset: {:?}", offset);

        // Cast and adjust pointer.
        let byte_ptr = bx.pointercast(self.llval, bx.cx().type_i8p());
        let byte_ptr = bx.gep(byte_ptr, &[offset]);

        // Finally, cast back to the type expected.
        let ll_fty = bx.cx().backend_type(field);
        debug!("struct_field_ptr: Field type is {:?}", ll_fty);

        PlaceRef {
            llval: bx.pointercast(byte_ptr, bx.cx().type_ptr_to(ll_fty)),
            llextra: self.llextra,
            layout: field,
            align: effective_field_align,
        }
    }

    /// Obtain the actual discriminant of a value.
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let cast_to = bx.cx().immediate_backend_type(bx.cx().layout_of(cast_to));
        if self.layout.abi.is_uninhabited() {
            return bx.cx().const_undef(cast_to);
        }
        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Single { index } => {
                let discr_val = self
                    .layout
                    .ty
                    .discriminant_for_variant(bx.cx().tcx(), index)
                    .map_or(index.as_u32() as u128, |discr| discr.val);
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { ref tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        // Read the tag/niche-encoded discriminant from memory.
        let tag = self.project_field(bx, tag_field);
        let tag = bx.load_operand(tag);

        // Decode the discriminant (specifically if it's niche-encoded).
        match *tag_encoding {
            TagEncoding::Direct => {
                let signed = match tag_scalar.value {
                    // We use `i1` for bytes that are always `0` or `1`,
                    // e.g., `#[repr(i8)] enum E { A, B }`, but we can't
                    // let LLVM interpret the `i1` as signed, because
                    // then `i1 1` (i.e., `E::B`) is effectively `i8 -1`.
                    Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag.immediate(), cast_to, signed)
            }
            TagEncoding::Niche { dataful_variant, ref niche_variants, niche_start } => {
                // Rebase from niche values to discriminants, and check
                // whether the result is in range for the niche variants.
                let niche_llty = bx.cx().immediate_backend_type(tag.layout);
                let tag = tag.immediate();

                // We first compute the "relative discriminant" (wrt `niche_variants`),
                // that is, if `n = niche_variants.end() - niche_variants.start()`,
                // we remap `niche_start..=niche_start + n` (which may wrap around)
                // to (non-wrap-around) `0..=n`, to be able to check whether the
                // discriminant corresponds to a niche variant with one comparison.
                // We also can't go directly to the (variant index) discriminant
                // and check that it is in the range `niche_variants`, because
                // that might not fit in the same type, on top of needing an extra
                // comparison (see also the comment on `let niche_discr`).
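                //
                // E.g., with `niche_variants = 1..=2` and `niche_start = 42`
                // (illustrative values), raw tag values 42 and 43 are remapped
                // to relative discriminants 0 and 1, so a single unsigned
                // `relative_discr <= 1` comparison identifies niche variants.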
                let relative_discr = if niche_start == 0 {
                    // Avoid subtracting `0`, which wouldn't work for pointers.
                    // FIXME(eddyb) check the actual primitive type here.
                    tag
                } else {
                    bx.sub(tag, bx.cx().const_uint_big(niche_llty, niche_start))
                };
                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();
                let is_niche = if relative_max == 0 {
                    // Avoid calling `const_uint`, which wouldn't work for pointers.
                    // Also use canonical == 0 instead of non-canonical u<= 0.
                    // FIXME(eddyb) check the actual primitive type here.
                    bx.icmp(IntPredicate::IntEQ, relative_discr, bx.cx().const_null(niche_llty))
                } else {
                    let relative_max = bx.cx().const_uint(niche_llty, relative_max as u64);
                    bx.icmp(IntPredicate::IntULE, relative_discr, relative_max)
                };

                // NOTE(eddyb) this addition needs to be performed on the final
                // type, in case the niche itself can't represent all variant
                // indices (e.g. `u8` niche with more than `256` variants,
                // but enough uninhabited variants so that the remaining variants
                // fit in the niche).
                // In other words, `niche_variants.end - niche_variants.start`
                // is representable in the niche, but `niche_variants.end`
                // might not be, in extreme cases.
                let niche_discr = {
                    let relative_discr = if relative_max == 0 {
                        // HACK(eddyb) since we have only one niche, we know which
                        // one it is, and we can avoid having a dynamic value here.
                        bx.cx().const_uint(cast_to, 0)
                    } else {
                        bx.intcast(relative_discr, cast_to, false)
                    };
                    bx.add(
                        relative_discr,
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64),
                    )
                };

                bx.select(
                    is_niche,
                    niche_discr,
                    bx.cx().const_uint(cast_to, dataful_variant.as_u32() as u64),
                )
            }
        }
    }

    /// Sets the discriminant for a new value of the given case of the given
    /// representation.
    pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) {
        if self.layout.for_variant(bx.cx(), variant_index).abi.is_uninhabited() {
            // We play it safe by using a well-defined `abort`, but we could go for immediate UB
            // if that turns out to be helpful.
            bx.abort();
            return;
        }
        match self.layout.variants {
            Variants::Single { index } => {
                assert_eq!(index, variant_index);
            }
            Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
                let ptr = self.project_field(bx, tag_field);
                let to =
                    self.layout.ty.discriminant_for_variant(bx.tcx(), variant_index).unwrap().val;
                bx.store(
                    bx.cx().const_uint_big(bx.cx().backend_type(ptr.layout), to),
                    ptr.llval,
                    ptr.align,
                );
            }
            Variants::Multiple {
                tag_encoding:
                    TagEncoding::Niche { dataful_variant, ref niche_variants, niche_start },
                tag_field,
                ..
            } => {
                if variant_index != dataful_variant {
                    if bx.cx().sess().target.target.arch == "arm"
                        || bx.cx().sess().target.target.arch == "aarch64"
                    {
                        // FIXME(#34427): as workaround for LLVM bug on ARM,
                        // use memset of 0 before assigning niche value.
                        let fill_byte = bx.cx().const_u8(0);
                        let size = bx.cx().const_usize(self.layout.size.bytes());
                        bx.memset(self.llval, fill_byte, size, self.align, MemFlags::empty());
                    }

                    let niche = self.project_field(bx, tag_field);
                    let niche_llty = bx.cx().immediate_backend_type(niche.layout);
                    let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
                    let niche_value = (niche_value as u128).wrapping_add(niche_start);
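                    // (E.g., with `niche_variants = 1..=2` and `niche_start = 42`,
                    // illustrative values as in `codegen_get_discr`, setting
                    // variant index 2 stores the raw tag value `(2 - 1) + 42 == 43`.)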
                    // FIXME(eddyb): check the actual primitive type here.
                    let niche_llval = if niche_value == 0 {
                        // HACK(eddyb): using `c_null` as it works on all types.
                        bx.cx().const_null(niche_llty)
                    } else {
                        bx.cx().const_uint_big(niche_llty, niche_value)
                    };
                    OperandValue::Immediate(niche_llval).store(bx, niche);
                }
            }
        }
    }

    pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        llindex: V,
    ) -> Self {
        // Statically compute the offset if we can, otherwise just use the element size,
        // as this will yield the lowest alignment.
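        // (E.g., a constant index 3 into a `[u64; N]` gives a static offset of
        // 24 bytes, so a 16-byte-aligned place is restricted to 8-byte alignment,
        // the largest power of two dividing 24; illustrative values.)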
        let layout = self.layout.field(bx, 0);
        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
        } else {
            layout.size
        };

        PlaceRef {
            llval: bx.inbounds_gep(self.llval, &[bx.cx().const_usize(0), llindex]),
            llextra: None,
            layout,
            align: self.align.restrict_for_offset(offset),
        }
    }

    pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = self.layout.for_variant(bx.cx(), variant_index);

        // Cast to the appropriate variant struct type.
        let variant_ty = bx.cx().backend_type(downcast.layout);
        downcast.llval = bx.pointercast(downcast.llval, bx.cx().type_ptr_to(variant_ty));

        downcast
    }

    pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_start(self.llval, self.layout.size);
    }

    pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_end(self.llval, self.layout.size);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn codegen_place(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> PlaceRef<'tcx, Bx::Value> {
        debug!("codegen_place(place_ref={:?})", place_ref);
        let cx = self.cx;
        let tcx = self.cx.tcx();

        let result = match place_ref {
            mir::PlaceRef { local, projection: [] } => match self.locals[local] {
                LocalRef::Place(place) => {
                    return place;
                }
                LocalRef::UnsizedPlace(place) => {
                    return bx.load_operand(place).deref(cx);
                }
                LocalRef::Operand(..) => {
                    bug!("using operand local {:?} as place", place_ref);
                }
            },
            mir::PlaceRef { local, projection: [proj_base @ .., mir::ProjectionElem::Deref] } => {
                // Load the pointer from its location.
                self.codegen_consume(bx, mir::PlaceRef { local, projection: proj_base })
                    .deref(bx.cx())
            }
            mir::PlaceRef { local, projection: &[ref proj_base @ .., elem] } => {
                // FIXME turn this recursion into iteration
                let cg_base =
                    self.codegen_place(bx, mir::PlaceRef { local, projection: proj_base });
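
                // (E.g., for a MIR place such as `(*_1).0`, the base `*_1` is
                // codegen'd by the recursive call above, and the trailing
                // `Field(0)` projection is applied in the `match` below.)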
                match elem {
                    mir::ProjectionElem::Deref => bug!(),
                    mir::ProjectionElem::Field(ref field, _) => {
                        cg_base.project_field(bx, field.index())
                    }
                    mir::ProjectionElem::Index(index) => {
                        let index = &mir::Operand::Copy(mir::Place::from(index));
                        let index = self.codegen_operand(bx, index);
                        let llindex = index.immediate();
                        cg_base.project_index(bx, llindex)
                    }
                    mir::ProjectionElem::ConstantIndex {
                        offset,
                        from_end: false,
                        min_length: _,
                    } => {
                        let lloffset = bx.cx().const_usize(offset as u64);
                        cg_base.project_index(bx, lloffset)
                    }
                    mir::ProjectionElem::ConstantIndex {
                        offset,
                        from_end: true,
                        min_length: _,
                    } => {
                        let lloffset = bx.cx().const_usize(offset as u64);
                        let lllen = cg_base.len(bx.cx());
                        let llindex = bx.sub(lllen, lloffset);
                        cg_base.project_index(bx, llindex)
                    }
                    mir::ProjectionElem::Subslice { from, to, from_end } => {
                        let mut subslice =
                            cg_base.project_index(bx, bx.cx().const_usize(from as u64));
                        let projected_ty =
                            PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, elem).ty;
                        subslice.layout = bx.cx().layout_of(self.monomorphize(&projected_ty));

                        if subslice.layout.is_unsized() {
                            assert!(from_end, "slice subslices should be `from_end`");
                            subslice.llextra = Some(bx.sub(
                                cg_base.llextra.unwrap(),
                                bx.cx().const_usize((from as u64) + (to as u64)),
                            ));
                        }

                        // Cast the place pointer type to the new
                        // array or slice type (`*[%_; new_len]`).
                        subslice.llval = bx.pointercast(
                            subslice.llval,
                            bx.cx().type_ptr_to(bx.cx().backend_type(subslice.layout)),
                        );

                        subslice
                    }
                    mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, v),
                }
            }
        };
        debug!("codegen_place(place={:?}) => {:?}", place_ref, result);
        result
    }

    pub fn monomorphized_place_ty(&self, place_ref: mir::PlaceRef<'tcx>) -> Ty<'tcx> {
        let tcx = self.cx.tcx();
        let place_ty = mir::Place::ty_from(place_ref.local, place_ref.projection, self.mir, tcx);
        self.monomorphize(&place_ty.ty)
    }
}