Correctly account for different address spaces in LLVM intrinsic invocations

This commit is contained in:
sayantn
2025-06-15 22:13:09 +05:30
parent 9415f3d8a6
commit a9500d6b0b
2 changed files with 24 additions and 19 deletions

View File

@@ -1394,7 +1394,7 @@ impl<'ll> StaticBuilderMethods for Builder<'_, 'll, '_> {
let global = self.cx().get_static(def_id);
if self.cx().tcx.is_thread_local_static(def_id) {
let pointer =
-self.call_intrinsic("llvm.threadlocal.address", &[self.type_ptr()], &[global]);
+self.call_intrinsic("llvm.threadlocal.address", &[self.val_ty(global)], &[global]);
// Cast to default address space if globals are in a different addrspace
self.pointercast(pointer, self.type_ptr())
} else {
@@ -1609,7 +1609,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
return;
}
-self.call_intrinsic(intrinsic, &[self.type_ptr()], &[self.cx.const_u64(size), ptr]);
+self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[self.cx.const_u64(size), ptr]);
}
}
impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {

View File

@@ -154,8 +154,6 @@ fn call_simple_intrinsic<'ll, 'tcx>(
sym::roundf64 => ("llvm.round", &[bx.type_f64()]),
sym::roundf128 => ("llvm.round", &[bx.type_f128()]),
-sym::ptr_mask => ("llvm.ptrmask", &[bx.type_ptr(), bx.type_isize()]),
_ => return None,
};
Some(bx.call_intrinsic(
@@ -181,6 +179,14 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
let simple = call_simple_intrinsic(self, name, args);
let llval = match name {
_ if simple.is_some() => simple.unwrap(),
+sym::ptr_mask => {
+let ptr = args[0].immediate();
+self.call_intrinsic(
+"llvm.ptrmask",
+&[self.val_ty(ptr), self.type_isize()],
+&[ptr, args[1].immediate()],
+)
+}
sym::is_val_statically_known => {
if let OperandValue::Immediate(imm) = args[0].val {
self.call_intrinsic(
@@ -232,11 +238,14 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
return Ok(());
}
sym::breakpoint => self.call_intrinsic("llvm.debugtrap", &[], &[]),
-sym::va_copy => self.call_intrinsic(
-"llvm.va_copy",
-&[self.type_ptr()],
-&[args[0].immediate(), args[1].immediate()],
-),
+sym::va_copy => {
+let dest = args[0].immediate();
+self.call_intrinsic(
+"llvm.va_copy",
+&[self.val_ty(dest)],
+&[dest, args[1].immediate()],
+)
+}
sym::va_arg => {
match result.layout.backend_repr {
BackendRepr::Scalar(scalar) => {
@@ -309,15 +318,11 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
sym::prefetch_write_instruction => (1, 0),
_ => bug!(),
};
+let ptr = args[0].immediate();
self.call_intrinsic(
"llvm.prefetch",
-&[self.type_ptr()],
-&[
-args[0].immediate(),
-self.const_i32(rw),
-args[1].immediate(),
-self.const_i32(cache_type),
-],
+&[self.val_ty(ptr)],
+&[ptr, self.const_i32(rw), args[1].immediate(), self.const_i32(cache_type)],
)
}
sym::carrying_mul_add => {
@@ -637,11 +642,11 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
}
fn va_start(&mut self, va_list: &'ll Value) -> &'ll Value {
-self.call_intrinsic("llvm.va_start", &[self.type_ptr()], &[va_list])
+self.call_intrinsic("llvm.va_start", &[self.val_ty(va_list)], &[va_list])
}
fn va_end(&mut self, va_list: &'ll Value) -> &'ll Value {
-self.call_intrinsic("llvm.va_end", &[self.type_ptr()], &[va_list])
+self.call_intrinsic("llvm.va_end", &[self.val_ty(va_list)], &[va_list])
}
}
@@ -1018,7 +1023,7 @@ fn codegen_emcc_try<'ll, 'tcx>(
let selector = bx.extract_value(vals, 1);
// Check if the typeid we got is the one for a Rust panic.
-let rust_typeid = bx.call_intrinsic("llvm.eh.typeid.for", &[bx.type_ptr()], &[tydesc]);
+let rust_typeid = bx.call_intrinsic("llvm.eh.typeid.for", &[bx.val_ty(tydesc)], &[tydesc]);
let is_rust_panic = bx.icmp(IntPredicate::IntEQ, selector, rust_typeid);
let is_rust_panic = bx.zext(is_rust_panic, bx.type_bool());