Auto merge of #42727 - alexcrichton:allocators-new, r=eddyb
rustc: Implement the #[global_allocator] attribute

This PR is an implementation of [RFC 1974], which specifies a new way of defining a global allocator for a program. It obsoletes the old `#![allocator]` attribute and removes support for it.

[RFC 1974]: https://github.com/rust-lang/rfcs/pull/1974

The new `#[global_allocator]` attribute solves many of the problems encountered with the `#![allocator]` attribute, such as composition and the restrictions it imposed on the crate graph. The compiler now has much more control over the allocator's ABI and how it is wired up, leaving much more freedom in how the feature is implemented.

cc #27389
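For readers new to the feature, this is roughly how a program opts in to a custom global allocator. The snippet uses the form the attribute eventually stabilized as (with `std::alloc::System`); at the time of this PR the attribute and the `Alloc`-based plumbing were still feature-gated, so the exact paths differed, but the shape is the same.

```rust
use std::alloc::System;

// Register the system allocator as the program's global allocator.
// Any static whose type implements the global allocation trait works here.
#[global_allocator]
static GLOBAL: System = System;

fn main() {
    // Heap allocations made by Vec, Box, String, etc. now go through GLOBAL.
    let v = vec![1u8, 2, 3];
    assert_eq!(v.len(), 3);
}
```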
@@ -13,7 +13,7 @@
             slightly, especially to possibly take into account the \
             types being stored to make room for a future \
             tracing garbage collector",
-            issue = "27700")]
+            issue = "32838")]

 use core::cmp;
 use core::fmt;
@@ -73,6 +73,7 @@ impl Layout {
     /// * `size`, when rounded up to the nearest multiple of `align`,
     ///    must not overflow (i.e. the rounded value must be less than
     ///    `usize::MAX`).
+    #[inline]
     pub fn from_size_align(size: usize, align: usize) -> Option<Layout> {
         if !align.is_power_of_two() {
             return None;
@@ -96,13 +97,28 @@ impl Layout {
             return None;
         }

-        Some(Layout { size: size, align: align })
+        unsafe {
+            Some(Layout::from_size_align_unchecked(size, align))
+        }
     }

+    /// Creates a layout, bypassing all checks.
+    ///
+    /// # Unsafety
+    ///
+    /// This function is unsafe as it does not verify that `align` is a power of
+    /// two nor that `size` aligned to `align` fits within the address space.
+    #[inline]
+    pub unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Layout {
+        Layout { size: size, align: align }
+    }
+
     /// The minimum size in bytes for a memory block of this layout.
+    #[inline]
     pub fn size(&self) -> usize { self.size }

     /// The minimum byte alignment for a memory block of this layout.
+    #[inline]
     pub fn align(&self) -> usize { self.align }

     /// Constructs a `Layout` suitable for holding a value of type `T`.
@@ -135,6 +151,7 @@ impl Layout {
     ///
     /// Panics if the combination of `self.size` and the given `align`
     /// violates the conditions listed in `from_size_align`.
+    #[inline]
     pub fn align_to(&self, align: usize) -> Self {
         Layout::from_size_align(self.size, cmp::max(self.align, align)).unwrap()
     }
@@ -155,6 +172,7 @@ impl Layout {
     /// to be less than or equal to the alignment of the starting
     /// address for the whole allocated block of memory. One way to
     /// satisfy this constraint is to ensure `align <= self.align`.
+    #[inline]
     pub fn padding_needed_for(&self, align: usize) -> usize {
         let len = self.size();

@@ -556,6 +574,7 @@ pub unsafe trait Alloc {
     /// However, for clients that do not wish to track the capacity
     /// returned by `alloc_excess` locally, this method is likely to
     /// produce useful results.
+    #[inline]
     fn usable_size(&self, layout: &Layout) -> (usize, usize) {
         (layout.size(), layout.size())
     }
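The hunks above split `Layout` construction into a checked and an unchecked constructor. A minimal sketch of the difference, written against the stable `std::alloc::Layout` (the in-tree type here is the unstable `allocator::Layout`, which at this point returned `Option` instead of `Result`):

```rust
use std::alloc::Layout;

fn main() {
    // The checked constructor verifies that `align` is a power of two and
    // that `size`, rounded up to `align`, does not overflow.
    assert!(Layout::from_size_align(64, 16).is_ok());
    assert!(Layout::from_size_align(64, 3).is_err()); // 3 is not a power of two

    // The unchecked constructor skips the checks; the caller must uphold them.
    let layout = unsafe { Layout::from_size_align_unchecked(64, 16) };
    assert_eq!((layout.size(), layout.align()), (64, 16));
}
```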
@@ -23,7 +23,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
-use core::mem::{align_of_val, size_of_val};
 use core::intrinsics::abort;
 use core::mem;
 use core::mem::uninitialized;
@@ -34,7 +33,8 @@ use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
 use core::{isize, usize};
 use core::convert::From;
-use heap::deallocate;
+
+use heap::{Heap, Alloc, Layout};

 /// A soft limit on the amount of references that may be made to an `Arc`.
 ///
@@ -503,7 +503,7 @@ impl<T: ?Sized> Arc<T> {

         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
+            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
         }
     }

@@ -1007,7 +1007,9 @@ impl<T: ?Sized> Drop for Weak<T> {
         // ref, which can only happen after the lock is released.
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            unsafe { deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) }
+            unsafe {
+                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            }
         }
     }
 }
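The `Arc` drop paths now describe the allocation with `Layout::for_value` instead of passing a separate size and alignment. A small illustration of the equivalence, using the stable `std::alloc::Layout` and `std::mem` helpers:

```rust
use std::alloc::Layout;
use std::mem::{align_of_val, size_of_val};

fn main() {
    let value = [0u64; 4];

    // One Layout value replaces the (size_of_val, align_of_val) pair that the
    // old deallocate() call threaded through by hand.
    let layout = Layout::for_value(&value);
    assert_eq!(layout.size(), size_of_val(&value));
    assert_eq!(layout.align(), align_of_val(&value));
}
```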
@@ -55,7 +55,7 @@

 #![stable(feature = "rust1", since = "1.0.0")]

-use heap;
+use heap::{Heap, Layout, Alloc};
 use raw_vec::RawVec;

 use core::any::Any;
@@ -135,8 +135,7 @@ pub struct Box<T: ?Sized>(Unique<T>);
 #[allow(missing_debug_implementations)]
 pub struct IntermediateBox<T: ?Sized> {
     ptr: *mut u8,
-    size: usize,
-    align: usize,
+    layout: Layout,
     marker: marker::PhantomData<*mut T>,
 }

@@ -156,23 +155,21 @@ unsafe fn finalize<T>(b: IntermediateBox<T>) -> Box<T> {
 }

 fn make_place<T>() -> IntermediateBox<T> {
-    let size = mem::size_of::<T>();
-    let align = mem::align_of::<T>();
+    let layout = Layout::new::<T>();

-    let p = if size == 0 {
+    let p = if layout.size() == 0 {
         mem::align_of::<T>() as *mut u8
     } else {
-        let p = unsafe { heap::allocate(size, align) };
-        if p.is_null() {
-            panic!("Box make_place allocation failure.");
+        unsafe {
+            Heap.alloc(layout.clone()).unwrap_or_else(|err| {
+                Heap.oom(err)
+            })
         }
-        p
     };

     IntermediateBox {
         ptr: p,
-        size: size,
-        align: align,
+        layout: layout,
         marker: marker::PhantomData,
     }
 }
@@ -221,8 +218,10 @@ impl<T> Placer<T> for ExchangeHeapSingleton {
            issue = "27779")]
 impl<T: ?Sized> Drop for IntermediateBox<T> {
     fn drop(&mut self) {
-        if self.size > 0 {
-            unsafe { heap::deallocate(self.ptr, self.size, self.align) }
+        if self.layout.size() > 0 {
+            unsafe {
+                Heap.dealloc(self.ptr, self.layout.clone())
+            }
         }
     }
 }
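`make_place` now allocates with `Heap.alloc(layout)` and aborts through the allocator's `oom` handler instead of checking for a null pointer and panicking. A self-contained sketch of that pattern in terms of today's stable `std::alloc` API (the diff itself uses the unstable `Heap`/`Alloc` types):

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};

// Allocate storage for a T, diverging through the OOM hook on failure,
// roughly mirroring Heap.alloc(layout).unwrap_or_else(|err| Heap.oom(err)).
unsafe fn allocate_for<T>() -> *mut u8 {
    let layout = Layout::new::<T>();
    if layout.size() == 0 {
        // Zero-sized types get a dangling but well-aligned pointer instead.
        return layout.align() as *mut u8;
    }
    let p = alloc(layout);
    if p.is_null() {
        handle_alloc_error(layout); // diverges, like Heap.oom(err)
    }
    p
}

fn main() {
    unsafe {
        let p = allocate_for::<u64>();
        dealloc(p, Layout::new::<u64>());
    }
}
```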
@@ -48,7 +48,7 @@ use core::ptr::{self, Unique};
 use core::slice;

 use boxed::Box;
-use heap;
+use heap::{Heap, Alloc, Layout};

 const B: usize = 6;
 pub const MIN_LEN: usize = B - 1;
@@ -254,11 +254,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();

         unsafe {
-            heap::deallocate(
-                top,
-                mem::size_of::<InternalNode<K, V>>(),
-                mem::align_of::<InternalNode<K, V>>()
-            );
+            Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -445,7 +441,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
     > {
         let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
         let ret = self.ascend().ok();
-        heap::deallocate(ptr, mem::size_of::<LeafNode<K, V>>(), mem::align_of::<LeafNode<K, V>>());
+        Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -466,11 +462,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
     > {
         let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
         let ret = self.ascend().ok();
-        heap::deallocate(
-            ptr,
-            mem::size_of::<InternalNode<K, V>>(),
-            mem::align_of::<InternalNode<K, V>>()
-        );
+        Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1252,16 +1244,14 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             ).correct_parent_link();
         }

-        heap::deallocate(
+        Heap.dealloc(
             right_node.node.get() as *mut u8,
-            mem::size_of::<InternalNode<K, V>>(),
-            mem::align_of::<InternalNode<K, V>>()
+            Layout::new::<InternalNode<K, V>>(),
         );
     } else {
-        heap::deallocate(
+        Heap.dealloc(
             right_node.node.get() as *mut u8,
-            mem::size_of::<LeafNode<K, V>>(),
-            mem::align_of::<LeafNode<K, V>>()
+            Layout::new::<LeafNode<K, V>>(),
         );
     }

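In the btree code, every `heap::deallocate(ptr, size_of::<T>(), align_of::<T>())` call collapses into `Heap.dealloc(ptr, Layout::new::<T>())`. The equivalence, with a made-up `Node` type standing in for `InternalNode`/`LeafNode`:

```rust
use std::alloc::Layout;
use std::mem;

// Hypothetical stand-in for the btree node types used above.
struct Node {
    keys: [u32; 11],
    edges: [usize; 12],
}

fn main() {
    // Layout::new::<T>() packages the same (size, align) pair the old code
    // spelled out with mem::size_of / mem::align_of.
    let layout = Layout::new::<Node>();
    assert_eq!(layout.size(), mem::size_of::<Node>());
    assert_eq!(layout.align(), mem::align_of::<Node>());
}
```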
@@ -8,207 +8,212 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-#![unstable(feature = "heap_api",
+#![unstable(feature = "allocator_api",
             reason = "the precise API and guarantees it provides may be tweaked \
                       slightly, especially to possibly take into account the \
                       types being stored to make room for a future \
                       tracing garbage collector",
-            issue = "27700")]
+            issue = "32838")]

-use allocator::{Alloc, AllocErr, CannotReallocInPlace, Layout};
-use core::{isize, usize, cmp, ptr};
 use core::intrinsics::{min_align_of_val, size_of_val};
+use core::mem::{self, ManuallyDrop};
+use core::usize;

-#[allow(improper_ctypes)]
-extern "C" {
-    #[allocator]
-    fn __rust_allocate(size: usize, align: usize) -> *mut u8;
-    fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8;
-    fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
-    fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
-    fn __rust_reallocate_inplace(ptr: *mut u8,
-                                 old_size: usize,
-                                 size: usize,
-                                 align: usize)
-                                 -> usize;
-    fn __rust_usable_size(size: usize, align: usize) -> usize;
+pub use allocator::*;
+#[doc(hidden)]
+pub mod __core {
+    pub use core::*;
 }

-#[inline(always)]
-fn check_size_and_alignment(size: usize, align: usize) {
-    debug_assert!(size != 0);
-    debug_assert!(size <= isize::MAX as usize,
-                  "Tried to allocate too much: {} bytes",
-                  size);
-    debug_assert!(usize::is_power_of_two(align),
-                  "Invalid alignment of allocation: {}",
-                  align);
+extern "Rust" {
+    #[allocator]
+    fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;
+    fn __rust_oom(err: *const u8) -> !;
+    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
+    fn __rust_usable_size(layout: *const u8,
+                          min: *mut usize,
+                          max: *mut usize);
+    fn __rust_realloc(ptr: *mut u8,
+                      old_size: usize,
+                      old_align: usize,
+                      new_size: usize,
+                      new_align: usize,
+                      err: *mut u8) -> *mut u8;
+    fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;
+    fn __rust_alloc_excess(size: usize,
+                           align: usize,
+                           excess: *mut usize,
+                           err: *mut u8) -> *mut u8;
+    fn __rust_realloc_excess(ptr: *mut u8,
+                             old_size: usize,
+                             old_align: usize,
+                             new_size: usize,
+                             new_align: usize,
+                             excess: *mut usize,
+                             err: *mut u8) -> *mut u8;
+    fn __rust_grow_in_place(ptr: *mut u8,
+                            old_size: usize,
+                            old_align: usize,
+                            new_size: usize,
+                            new_align: usize) -> u8;
+    fn __rust_shrink_in_place(ptr: *mut u8,
+                              old_size: usize,
+                              old_align: usize,
+                              new_size: usize,
+                              new_align: usize) -> u8;
 }
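The `extern "Rust"` block above is the heart of the new design: liballoc no longer links directly against an allocator crate, it just calls these `__rust_*` symbols, and the compiler wires them up to whatever static the program registers with `#[global_allocator]` (or to a default). As a hedged illustration of the user-facing side of that contract, here is a counting allocator in the form the feature later stabilized as (`GlobalAlloc`); at the time of this PR the registered static implemented the unstable `Alloc` trait instead:

```rust
use std::alloc::{GlobalAlloc, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};

// A custom allocator that the compiler-generated __rust_* shims end up
// forwarding to once it is registered below.
struct Counting;

static ALLOCATED: AtomicUsize = AtomicUsize::new(0);

unsafe impl GlobalAlloc for Counting {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        ALLOCATED.fetch_add(layout.size(), Ordering::Relaxed);
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        ALLOCATED.fetch_sub(layout.size(), Ordering::Relaxed);
        System.dealloc(ptr, layout)
    }
}

#[global_allocator]
static GLOBAL: Counting = Counting;

fn main() {
    let v = vec![0u8; 1024];
    drop(v);
    println!("bytes still allocated: {}", ALLOCATED.load(Ordering::Relaxed));
}
```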

 #[derive(Copy, Clone, Default, Debug)]
-pub struct HeapAlloc;
+pub struct Heap;

-unsafe impl Alloc for HeapAlloc {
+unsafe impl Alloc for Heap {
+    #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        let addr = allocate(layout.size(), layout.align());
-        if addr.is_null() {
-            Err(AllocErr::Exhausted { request: layout })
+        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let ptr = __rust_alloc(layout.size(),
+                               layout.align(),
+                               &mut *err as *mut AllocErr as *mut u8);
+        if ptr.is_null() {
+            Err(ManuallyDrop::into_inner(err))
         } else {
-            Ok(addr)
+            Ok(ptr)
         }
     }

-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        let addr = allocate_zeroed(layout.size(), layout.align());
-        if addr.is_null() {
-            Err(AllocErr::Exhausted { request: layout })
-        } else {
-            Ok(addr)
+    #[inline]
+    fn oom(&mut self, err: AllocErr) -> ! {
+        unsafe {
+            __rust_oom(&err as *const AllocErr as *const u8)
         }
     }

+    #[inline]
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        deallocate(ptr, layout.size(), layout.align());
+        __rust_dealloc(ptr, layout.size(), layout.align())
     }

+    #[inline]
     fn usable_size(&self, layout: &Layout) -> (usize, usize) {
-        (layout.size(), usable_size(layout.size(), layout.align()))
+        let mut min = 0;
+        let mut max = 0;
+        unsafe {
+            __rust_usable_size(layout as *const Layout as *const u8,
+                               &mut min,
+                               &mut max);
+        }
+        (min, max)
     }

+    #[inline]
     unsafe fn realloc(&mut self,
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout)
                       -> Result<*mut u8, AllocErr>
     {
-        let old_size = layout.size();
-        let new_size = new_layout.size();
-        if layout.align() == new_layout.align() {
-            let new_ptr = reallocate(ptr, old_size, new_size, layout.align());
-            if new_ptr.is_null() {
-                // We assume `reallocate` already tried alloc + copy +
-                // dealloc fallback; thus pointless to repeat effort
-                Err(AllocErr::Exhausted { request: new_layout })
-            } else {
-                Ok(new_ptr)
-            }
+        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let ptr = __rust_realloc(ptr,
+                                 layout.size(),
+                                 layout.align(),
+                                 new_layout.size(),
+                                 new_layout.align(),
+                                 &mut *err as *mut AllocErr as *mut u8);
+        if ptr.is_null() {
+            Err(ManuallyDrop::into_inner(err))
         } else {
-            // if alignments don't match, fall back on alloc + copy + dealloc
-            let result = self.alloc(new_layout);
-            if let Ok(new_ptr) = result {
-                ptr::copy_nonoverlapping(ptr as *const u8, new_ptr, cmp::min(old_size, new_size));
-                self.dealloc(ptr, layout);
-            }
-            result
+            mem::forget(err);
+            Ok(ptr)
         }
     }

+    #[inline]
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let ptr = __rust_alloc_zeroed(layout.size(),
+                                      layout.align(),
+                                      &mut *err as *mut AllocErr as *mut u8);
+        if ptr.is_null() {
+            Err(ManuallyDrop::into_inner(err))
+        } else {
+            Ok(ptr)
+        }
+    }
+
+    #[inline]
+    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
+        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let mut size = 0;
+        let ptr = __rust_alloc_excess(layout.size(),
+                                      layout.align(),
+                                      &mut size,
+                                      &mut *err as *mut AllocErr as *mut u8);
+        if ptr.is_null() {
+            Err(ManuallyDrop::into_inner(err))
+        } else {
+            Ok(Excess(ptr, size))
+        }
+    }
+
+    #[inline]
+    unsafe fn realloc_excess(&mut self,
+                             ptr: *mut u8,
+                             layout: Layout,
+                             new_layout: Layout) -> Result<Excess, AllocErr> {
+        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let mut size = 0;
+        let ptr = __rust_realloc_excess(ptr,
+                                        layout.size(),
+                                        layout.align(),
+                                        new_layout.size(),
+                                        new_layout.align(),
+                                        &mut size,
+                                        &mut *err as *mut AllocErr as *mut u8);
+        if ptr.is_null() {
+            Err(ManuallyDrop::into_inner(err))
+        } else {
+            Ok(Excess(ptr, size))
+        }
+    }
+
+    #[inline]
     unsafe fn grow_in_place(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout)
                             -> Result<(), CannotReallocInPlace>
     {
-        // grow_in_place spec requires this, and the spec for reallocate_inplace
-        // makes it hard to detect failure if it does not hold.
         debug_assert!(new_layout.size() >= layout.size());
-
-        if layout.align() != new_layout.align() { // reallocate_inplace requires this.
-            return Err(CannotReallocInPlace);
+        debug_assert!(new_layout.align() == layout.align());
+        let ret = __rust_grow_in_place(ptr,
+                                       layout.size(),
+                                       layout.align(),
+                                       new_layout.size(),
+                                       new_layout.align());
+        if ret != 0 {
+            Ok(())
+        } else {
+            Err(CannotReallocInPlace)
         }
-        let usable = reallocate_inplace(ptr, layout.size(), new_layout.size(), layout.align());
-        if usable >= new_layout.size() { Ok(()) } else { Err(CannotReallocInPlace) }
     }
-}
-
-// FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`
-
-/// Return a pointer to `size` bytes of memory aligned to `align`.
-///
-/// On failure, return a null pointer.
-///
-/// Behavior is undefined if the requested size is 0 or the alignment is not a
-/// power of 2. The alignment must be no larger than the largest supported page
-/// size on the platform.
-#[inline]
-pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
-    check_size_and_alignment(size, align);
-    __rust_allocate(size, align)
-}
-
-/// Return a pointer to `size` bytes of memory aligned to `align` and
-/// initialized to zeroes.
-///
-/// On failure, return a null pointer.
-///
-/// Behavior is undefined if the requested size is 0 or the alignment is not a
-/// power of 2. The alignment must be no larger than the largest supported page
-/// size on the platform.
-#[inline]
-pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
-    check_size_and_alignment(size, align);
-    __rust_allocate_zeroed(size, align)
-}
-
-/// Resize the allocation referenced by `ptr` to `size` bytes.
-///
-/// On failure, return a null pointer and leave the original allocation intact.
-///
-/// If the allocation was relocated, the memory at the passed-in pointer is
-/// undefined after the call.
-///
-/// Behavior is undefined if the requested size is 0 or the alignment is not a
-/// power of 2. The alignment must be no larger than the largest supported page
-/// size on the platform.
-///
-/// The `old_size` and `align` parameters are the parameters that were used to
-/// create the allocation referenced by `ptr`. The `old_size` parameter may be
-/// any value in range_inclusive(requested_size, usable_size).
-#[inline]
-pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
-    check_size_and_alignment(size, align);
-    __rust_reallocate(ptr, old_size, size, align)
-}
-
-/// Resize the allocation referenced by `ptr` to `size` bytes.
-///
-/// If the operation succeeds, it returns `usable_size(size, align)` and if it
-/// fails (or is a no-op) it returns `usable_size(old_size, align)`.
-///
-/// Behavior is undefined if the requested size is 0 or the alignment is not a
-/// power of 2. The alignment must be no larger than the largest supported page
-/// size on the platform.
-///
-/// The `old_size` and `align` parameters are the parameters that were used to
-/// create the allocation referenced by `ptr`. The `old_size` parameter may be
-/// any value in range_inclusive(requested_size, usable_size).
-#[inline]
-pub unsafe fn reallocate_inplace(ptr: *mut u8,
-                                 old_size: usize,
-                                 size: usize,
-                                 align: usize)
-                                 -> usize {
-    check_size_and_alignment(size, align);
-    __rust_reallocate_inplace(ptr, old_size, size, align)
-}
-
-/// Deallocates the memory referenced by `ptr`.
-///
-/// The `ptr` parameter must not be null.
-///
-/// The `old_size` and `align` parameters are the parameters that were used to
-/// create the allocation referenced by `ptr`. The `old_size` parameter may be
-/// any value in range_inclusive(requested_size, usable_size).
-#[inline]
-pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) {
-    __rust_deallocate(ptr, old_size, align)
-}
-
-/// Returns the usable size of an allocation created with the specified the
-/// `size` and `align`.
-#[inline]
-pub fn usable_size(size: usize, align: usize) -> usize {
-    unsafe { __rust_usable_size(size, align) }
+
+    #[inline]
+    unsafe fn shrink_in_place(&mut self,
+                              ptr: *mut u8,
+                              layout: Layout,
+                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        debug_assert!(new_layout.size() <= layout.size());
+        debug_assert!(new_layout.align() == layout.align());
+        let ret = __rust_shrink_in_place(ptr,
+                                         layout.size(),
+                                         layout.align(),
+                                         new_layout.size(),
+                                         new_layout.align());
+        if ret != 0 {
+            Ok(())
+        } else {
+            Err(CannotReallocInPlace)
+        }
+    }
 }

 /// An arbitrary non-null address to represent zero-size allocations.
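Note that `Heap::realloc` no longer open-codes the alloc + copy + dealloc fallback for mismatched alignments; it simply forwards to `__rust_realloc` and reports the error it gets back. For illustration, a standalone version of that kind of fallback, written against the stable `std::alloc` free functions rather than the unstable trait:

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::{cmp, ptr};

// Reallocate "by hand": allocate the new block, copy the common prefix,
// then free the old block. This is what an allocator falls back to when it
// cannot resize in place (for example when the alignment changes).
unsafe fn realloc_fallback(old_ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
    let new_ptr = alloc(new);
    if new_ptr.is_null() {
        handle_alloc_error(new);
    }
    ptr::copy_nonoverlapping(old_ptr, new_ptr, cmp::min(old.size(), new.size()));
    dealloc(old_ptr, old);
    new_ptr
}

fn main() {
    unsafe {
        let old = Layout::from_size_align(16, 8).unwrap();
        let new = Layout::from_size_align(64, 16).unwrap();
        let p = alloc(old);
        if p.is_null() {
            handle_alloc_error(old);
        }
        let q = realloc_fallback(p, old, new);
        dealloc(q, new);
    }
}
```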
@@ -228,11 +233,10 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     if size == 0 {
         align as *mut u8
     } else {
-        let ptr = allocate(size, align);
-        if ptr.is_null() {
-            ::oom()
-        }
-        ptr
+        let layout = Layout::from_size_align_unchecked(size, align);
+        Heap.alloc(layout).unwrap_or_else(|err| {
+            Heap.oom(err)
+        })
     }
 }
@@ -243,7 +247,8 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
     let align = min_align_of_val(&*ptr);
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
-        deallocate(ptr as *mut u8, size, align);
+        let layout = Layout::from_size_align_unchecked(size, align);
+        Heap.dealloc(ptr as *mut u8, layout);
     }
 }
@@ -252,38 +257,22 @@ mod tests {
     extern crate test;
     use self::test::Bencher;
     use boxed::Box;
-    use heap;
+    use heap::{Heap, Alloc, Layout};

     #[test]
     fn allocate_zeroed() {
         unsafe {
-            let size = 1024;
-            let ptr = heap::allocate_zeroed(size, 1);
-            if ptr.is_null() {
-                ::oom()
-            }
+            let layout = Layout::from_size_align(1024, 1).unwrap();
+            let ptr = Heap.alloc_zeroed(layout.clone())
+                .unwrap_or_else(|e| Heap.oom(e));

-            let end = ptr.offset(size as isize);
+            let end = ptr.offset(layout.size() as isize);
             let mut i = ptr;
             while i < end {
                 assert_eq!(*i, 0);
                 i = i.offset(1);
             }
-            heap::deallocate(ptr, size, 1);
-        }
-    }
-
-    #[test]
-    fn basic_reallocate_inplace_noop() {
-        unsafe {
-            let size = 4000;
-            let ptr = heap::allocate(size, 8);
-            if ptr.is_null() {
-                ::oom()
-            }
-            let ret = heap::reallocate_inplace(ptr, size, size, 8);
-            heap::deallocate(ptr, size, 8);
-            assert_eq!(ret, heap::usable_size(size, 8));
+            Heap.dealloc(ptr, layout);
         }
     }

@@ -85,7 +85,7 @@
 #![cfg_attr(not(test), feature(slice_rotate))]
 #![cfg_attr(not(test), feature(str_checked_slicing))]
 #![cfg_attr(test, feature(rand, test))]
-#![feature(allocator)]
+#![cfg_attr(stage0, feature(allocator))]
 #![feature(allow_internal_unstable)]
 #![feature(box_patterns)]
 #![feature(box_syntax)]
@@ -124,6 +124,7 @@
 #![feature(unicode)]
 #![feature(unique)]
 #![feature(unsize)]
+#![cfg_attr(not(stage0), feature(allocator_internals))]

 #![cfg_attr(not(test), feature(fused, fn_traits, placement_new_protocol))]
 #![cfg_attr(test, feature(test, box_heap))]
@@ -168,7 +169,6 @@ mod boxed_test;
 pub mod arc;
 pub mod rc;
 pub mod raw_vec;
-pub mod oom;

 // collections modules
 pub mod binary_heap;
@@ -260,8 +260,6 @@ trait SpecExtend<I: IntoIterator> {
     fn spec_extend(&mut self, iter: I);
 }

-pub use oom::oom;
-
 #[doc(no_inline)]
 pub use binary_heap::BinaryHeap;
 #[doc(no_inline)]
@@ -1,61 +0,0 @@
-// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[cfg(target_has_atomic = "ptr")]
-pub use self::imp::set_oom_handler;
-use core::intrinsics;
-
-fn default_oom_handler() -> ! {
-    // The default handler can't do much more since we can't assume the presence
-    // of libc or any way of printing an error message.
-    unsafe { intrinsics::abort() }
-}
-
-/// Common out-of-memory routine
-#[cold]
-#[inline(never)]
-#[unstable(feature = "oom", reason = "not a scrutinized interface",
-           issue = "27700")]
-pub fn oom() -> ! {
-    self::imp::oom()
-}
-
-#[cfg(target_has_atomic = "ptr")]
-mod imp {
-    use core::mem;
-    use core::sync::atomic::{AtomicPtr, Ordering};
-
-    static OOM_HANDLER: AtomicPtr<()> = AtomicPtr::new(super::default_oom_handler as *mut ());
-
-    #[inline(always)]
-    pub fn oom() -> ! {
-        let value = OOM_HANDLER.load(Ordering::SeqCst);
-        let handler: fn() -> ! = unsafe { mem::transmute(value) };
-        handler();
-    }
-
-    /// Set a custom handler for out-of-memory conditions
-    ///
-    /// To avoid recursive OOM failures, it is critical that the OOM handler does
-    /// not allocate any memory itself.
-    #[unstable(feature = "oom", reason = "not a scrutinized interface",
-               issue = "27700")]
-    pub fn set_oom_handler(handler: fn() -> !) {
-        OOM_HANDLER.store(handler as *mut (), Ordering::SeqCst);
-    }
-}
-
-#[cfg(not(target_has_atomic = "ptr"))]
-mod imp {
-    #[inline(always)]
-    pub fn oom() -> ! {
-        super::default_oom_handler()
-    }
-}
@@ -12,7 +12,7 @@ use allocator::{Alloc, Layout};
 use core::ptr::{self, Unique};
 use core::mem;
 use core::slice;
-use heap::{HeapAlloc};
+use heap::Heap;
 use super::boxed::Box;
 use core::ops::Drop;
 use core::cmp;
@@ -45,7 +45,7 @@ use core::cmp;
 /// field. This allows zero-sized types to not be special-cased by consumers of
 /// this type.
 #[allow(missing_debug_implementations)]
-pub struct RawVec<T, A: Alloc = HeapAlloc> {
+pub struct RawVec<T, A: Alloc = Heap> {
     ptr: Unique<T>,
     cap: usize,
     a: A,
@@ -112,14 +112,14 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }

-impl<T> RawVec<T, HeapAlloc> {
+impl<T> RawVec<T, Heap> {
     /// Creates the biggest possible RawVec (on the system heap)
     /// without allocating. If T has positive size, then this makes a
     /// RawVec with capacity 0. If T has 0 size, then it it makes a
     /// RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub fn new() -> Self {
-        Self::new_in(HeapAlloc)
+        Self::new_in(Heap)
     }

     /// Creates a RawVec (on the system heap) with exactly the
@@ -139,13 +139,13 @@ impl<T> RawVec<T, HeapAlloc> {
     /// Aborts on OOM
     #[inline]
     pub fn with_capacity(cap: usize) -> Self {
-        RawVec::allocate_in(cap, false, HeapAlloc)
+        RawVec::allocate_in(cap, false, Heap)
     }

     /// Like `with_capacity` but guarantees the buffer is zeroed.
     #[inline]
     pub fn with_capacity_zeroed(cap: usize) -> Self {
-        RawVec::allocate_in(cap, true, HeapAlloc)
+        RawVec::allocate_in(cap, true, Heap)
     }
 }

@@ -166,7 +166,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }

-impl<T> RawVec<T, HeapAlloc> {
+impl<T> RawVec<T, Heap> {
     /// Reconstitutes a RawVec from a pointer, capacity.
     ///
     /// # Undefined Behavior
@@ -178,7 +178,7 @@ impl<T> RawVec<T, HeapAlloc> {
         RawVec {
             ptr: Unique::new(ptr),
             cap: cap,
-            a: HeapAlloc,
+            a: Heap,
         }
     }

@@ -609,7 +609,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }

-impl<T> RawVec<T, HeapAlloc> {
+impl<T> RawVec<T, Heap> {
     /// Converts the entire buffer into `Box<[T]>`.
     ///
     /// While it is not *strictly* Undefined Behavior to call
@@ -693,13 +693,13 @@ mod tests {
             if size > self.fuel {
                 return Err(AllocErr::Unsupported { details: "fuel exhausted" });
             }
-            match HeapAlloc.alloc(layout) {
+            match Heap.alloc(layout) {
                 ok @ Ok(_) => { self.fuel -= size; ok }
                 err @ Err(_) => err,
             }
         }
         unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-            HeapAlloc.dealloc(ptr, layout)
+            Heap.dealloc(ptr, layout)
         }
     }

@@ -252,13 +252,13 @@ use core::hash::{Hash, Hasher};
 use core::intrinsics::abort;
 use core::marker;
 use core::marker::Unsize;
-use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
+use core::mem::{self, forget, size_of, size_of_val, uninitialized};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
 use core::ptr::{self, Shared};
 use core::convert::From;

-use heap::{allocate, deallocate, box_free};
+use heap::{Heap, Alloc, Layout, box_free};
 use raw_vec::RawVec;

 struct RcBox<T: ?Sized> {
@@ -461,7 +461,8 @@ impl<T> Rc<[T]> {
             // FIXME(custom-DST): creating this invalid &[T] is dubiously defined,
             // we should have a better way of getting the size/align
             // of a DST from its unsized part.
-            let ptr = allocate(size_of_val(&*ptr), align_of_val(&*ptr));
+            let ptr = Heap.alloc(Layout::for_value(&*ptr))
+                .unwrap_or_else(|e| Heap.oom(e));
             let ptr: *mut RcBox<[T]> = mem::transmute([ptr as usize, value.len()]);

             // Initialize the new RcBox.
@@ -719,7 +720,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
             self.dec_weak();

             if self.weak() == 0 {
-                deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
+                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
             }
         }
     }
@@ -1097,7 +1098,7 @@ impl<T: ?Sized> Drop for Weak<T> {
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
+                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
             }
         }
     }