Use NonNull<Void> instead of *mut u8 in the Alloc trait
Fixes #49608
glandium authored and SimonSapin committed Apr 12, 2018
1 parent fd242ee commit fddf51e
Showing 18 changed files with 136 additions and 129 deletions.
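At the heart of the change are the `Alloc` trait's method signatures. The sketch below is abridged from the `src/liballoc/alloc.rs` hunks further down; the real trait has additional methods (`realloc`, `alloc_zeroed`, the excess and in-place variants), and the module paths are approximate for this nightly.

```rust
// Abridged sketch of the signature change; not the complete trait.
use core::alloc::{AllocErr, Layout, Void}; // paths approximate for this nightly
use core::ptr::NonNull;

pub unsafe trait Alloc {
    // Before: unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr>;

    // Before: unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout);
    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout);
}
```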
2 changes: 1 addition & 1 deletion src/doc/nomicon
@@ -30,6 +30,7 @@ looks like:
#![feature(global_allocator, allocator_api, heap_api)]

use std::alloc::{GlobalAlloc, System, Layout, Void};
use std::ptr::NonNull;

struct MyAllocator;

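For context, a minimal global-allocator definition consistent with the imports shown above might look like the following. This is a sketch that simply forwards to `System`, not the nomicon's full example; the `GLOBAL` static name is illustrative.

```rust
#![feature(global_allocator, allocator_api, heap_api)]

use std::alloc::{GlobalAlloc, System, Layout, Void};

struct MyAllocator;

unsafe impl GlobalAlloc for MyAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
        // Forward to the system allocator.
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

#[global_allocator]
static GLOBAL: MyAllocator = MyAllocator;

fn main() {
    // Heap allocations in the program now go through MyAllocator.
    let _v = vec![1u8, 2, 3];
}
```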
19 changes: 10 additions & 9 deletions src/liballoc/alloc.rs
@@ -16,6 +16,7 @@
issue = "32838")]

use core::intrinsics::{min_align_of_val, size_of_val};
use core::ptr::NonNull;
use core::usize;

#[doc(inline)]
@@ -120,27 +121,27 @@ unsafe impl GlobalAlloc for Global {

unsafe impl Alloc for Global {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
GlobalAlloc::alloc(self, layout).into()
}

#[inline]
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
}

#[inline]
unsafe fn realloc(&mut self,
ptr: *mut u8,
ptr: NonNull<Void>,
layout: Layout,
new_size: usize)
-> Result<*mut u8, AllocErr>
-> Result<NonNull<Void>, AllocErr>
{
GlobalAlloc::realloc(self, ptr as *mut Void, layout, new_size).into()
GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size).into()
}

#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
GlobalAlloc::alloc_zeroed(self, layout).into()
}

@@ -195,8 +196,8 @@ mod tests {
let ptr = Global.alloc_zeroed(layout.clone())
.unwrap_or_else(|_| Global.oom());

let end = ptr.offset(layout.size() as isize);
let mut i = ptr;
let mut i = ptr.cast::<u8>().as_ptr();
let end = i.offset(layout.size() as isize);
while i < end {
assert_eq!(*i, 0);
i = i.offset(1);
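A usage sketch of the new return type, modeled on the test in this hunk: the `NonNull<Void>` handle returned by `Global` is cast to a byte pointer only when the memory is actually inspected, and is passed back to `dealloc` unchanged. Module paths and the standalone-program framing are approximations for this nightly.

```rust
#![feature(allocator_api, nonnull_cast)]

extern crate alloc;

use alloc::alloc::{Alloc, Global, Layout};

fn main() {
    unsafe {
        let layout = Layout::from_size_align(16, 8).unwrap();

        // Allocation now yields a NonNull<Void> rather than a raw *mut u8.
        let ptr = Global.alloc_zeroed(layout.clone())
                        .unwrap_or_else(|_| Global.oom());

        // Cast to a concrete pointee type before reading the bytes.
        let bytes = ptr.cast::<u8>().as_ptr();
        assert_eq!(*bytes, 0);

        // Deallocation consumes the NonNull<Void> handle directly.
        Global.dealloc(ptr, layout);
    }
}
```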
16 changes: 6 additions & 10 deletions src/liballoc/arc.rs
@@ -512,15 +512,13 @@ impl<T: ?Sized> Arc<T> {
// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
let ptr = self.ptr.as_ptr();

// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
ptr::drop_in_place(&mut self.ptr.as_mut().data);

if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
}
}

@@ -558,7 +556,7 @@ impl<T: ?Sized> Arc<T> {
.unwrap_or_else(|_| Global.oom());

// Initialize the real ArcInner
let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;

ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -625,7 +623,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
// In the event of a panic, elements that have been written
// into the new ArcInner will be dropped, then the memory freed.
struct Guard<T> {
mem: *mut u8,
mem: NonNull<u8>,
elems: *mut T,
layout: Layout,
n_elems: usize,
@@ -639,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);

Global.dealloc(self.mem, self.layout.clone());
Global.dealloc(self.mem.as_void(), self.layout.clone());
}
}
}
@@ -655,7 +653,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
let elems = &mut (*ptr).data as *mut [T] as *mut T;

let mut guard = Guard{
mem: mem,
mem: NonNull::new_unchecked(mem),
elems: elems,
layout: layout,
n_elems: 0,
@@ -1147,8 +1145,6 @@ impl<T: ?Sized> Drop for Weak<T> {
/// assert!(other_weak_foo.upgrade().is_none());
/// ```
fn drop(&mut self) {
let ptr = self.ptr.as_ptr();

// If we find out that we were the last weak pointer, then its time to
// deallocate the data entirely. See the discussion in Arc::drop() about
// the memory orderings
@@ -1160,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe {
Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
}
}
}
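The deallocation idiom that the `Arc` and `Weak` drop paths now share can be shown in isolation. This is a sketch: `drop_inner` is a hypothetical helper, not part of the commit, and module paths are approximate. The key point is that `NonNull::as_void()` erases the pointee type and `Layout::for_value` recovers the allocation's layout, so the old `as *mut u8` casts disappear.

```rust
use alloc::alloc::{Alloc, Global, Layout};
use core::ptr::{self, NonNull};

// Hypothetical helper: drop a value and free its allocation, mirroring the
// pattern used by Arc::drop_slow and Weak::drop above.
unsafe fn drop_inner<T: ?Sized>(ptr: NonNull<T>) {
    // Compute the layout while the value is still valid.
    let layout = Layout::for_value(ptr.as_ref());
    ptr::drop_in_place(ptr.as_ptr());
    // Erase the pointee type and hand the allocation back to Global.
    Global.dealloc(ptr.as_void(), layout);
}
```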
16 changes: 8 additions & 8 deletions src/liballoc/btree/node.rs
@@ -236,7 +236,7 @@ impl<K, V> Root<K, V> {
pub fn pop_level(&mut self) {
debug_assert!(self.height > 0);

let top = self.node.ptr.as_ptr() as *mut u8;
let top = self.node.ptr;

self.node = unsafe {
BoxedNode::from_ptr(self.as_mut()
@@ -249,7 +249,7 @@ impl<K, V> Root<K, V> {
self.as_mut().as_leaf_mut().parent = ptr::null();

unsafe {
Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
Global.dealloc(NonNull::from(top).as_void(), Layout::new::<InternalNode<K, V>>());
}
}
}
@@ -433,9 +433,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
marker::Edge
>
> {
let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
Global.dealloc(node.as_void(), Layout::new::<LeafNode<K, V>>());
ret
}
}
@@ -454,9 +454,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
marker::Edge
>
> {
let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
Global.dealloc(node.as_void(), Layout::new::<InternalNode<K, V>>());
ret
}
}
@@ -1239,12 +1239,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
}

Global.dealloc(
right_node.node.as_ptr() as *mut u8,
right_node.node.as_void(),
Layout::new::<InternalNode<K, V>>(),
);
} else {
Global.dealloc(
right_node.node.as_ptr() as *mut u8,
right_node.node.as_void(),
Layout::new::<LeafNode<K, V>>(),
);
}
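The btree hunks above show one more conversion path: a node reached through a reference (or a boxed node pointer) is turned into a `NonNull` with `NonNull::from` and then erased with `as_void()` before deallocation. A minimal sketch follows; the `free_node` helper and `Node` type are illustrative, not from the commit, and module paths are approximate.

```rust
use alloc::alloc::{Alloc, Global, Layout};
use core::ptr::NonNull;

// Stand-in for LeafNode/InternalNode in the real code.
struct Node { _keys: [usize; 11] }

unsafe fn free_node(node: &mut Node) {
    // NonNull::from works on references, so no raw-pointer casts are needed
    // before handing the allocation back to Global.
    Global.dealloc(NonNull::from(node).as_void(), Layout::new::<Node>());
}
```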
22 changes: 17 additions & 5 deletions src/liballoc/heap.rs
@@ -8,14 +8,20 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

pub use alloc::{Excess, Layout, AllocErr, CannotReallocInPlace};
#![allow(deprecated)]

pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Void};
use core::alloc::Alloc as CoreAlloc;
use core::ptr::NonNull;

#[doc(hidden)]
pub mod __core {
pub use core::*;
}

#[derive(Debug)]
pub struct Excess(pub *mut u8, pub usize);

/// Compatibility with older versions of #[global_allocator] during bootstrap
pub unsafe trait Alloc {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
@@ -42,13 +48,13 @@ pub unsafe trait Alloc {
new_layout: Layout) -> Result<(), CannotReallocInPlace>;
}

#[allow(deprecated)]
unsafe impl<T> Alloc for T where T: CoreAlloc {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
CoreAlloc::alloc(self, layout)
CoreAlloc::alloc(self, layout).map(|ptr| ptr.cast().as_ptr())
}

unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
CoreAlloc::dealloc(self, ptr, layout)
}

@@ -64,35 +70,41 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<*mut u8, AllocErr> {
CoreAlloc::realloc(self, ptr, layout, new_layout.size())
let ptr = NonNull::new_unchecked(ptr as *mut Void);
CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
}

unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
CoreAlloc::alloc_zeroed(self, layout)
CoreAlloc::alloc_zeroed(self, layout).map(|ptr| ptr.cast().as_ptr())
}

unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
CoreAlloc::alloc_excess(self, layout)
.map(|e| Excess(e.0 .cast().as_ptr(), e.1))
}

unsafe fn realloc_excess(&mut self,
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<Excess, AllocErr> {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
.map(|e| Excess(e.0 .cast().as_ptr(), e.1))
}

unsafe fn grow_in_place(&mut self,
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<(), CannotReallocInPlace> {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
}

unsafe fn shrink_in_place(&mut self,
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<(), CannotReallocInPlace> {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
}
}
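The compatibility shim above relies on two small conversions between the old `*mut u8` world and the new `NonNull<Void>` one. They are shown in isolation below as a sketch; `to_new` and `to_old` are illustrative names and the `Void` import path is approximate.

```rust
use core::alloc::Void; // path approximate for this nightly
use core::ptr::NonNull;

// Old-style pointer into the new API: callers of the legacy trait guarantee
// the pointer came from a successful allocation, hence is non-null.
unsafe fn to_new(ptr: *mut u8) -> NonNull<Void> {
    NonNull::new_unchecked(ptr as *mut Void)
}

// New-style handle back into the old API: NonNull::cast (feature `nonnull_cast`)
// changes the pointee type without touching the address.
fn to_old(ptr: NonNull<Void>) -> *mut u8 {
    ptr.cast::<u8>().as_ptr()
}
```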
1 change: 1 addition & 0 deletions src/liballoc/lib.rs
@@ -99,6 +99,7 @@
#![feature(lang_items)]
#![feature(libc)]
#![feature(needs_allocator)]
#![feature(nonnull_cast)]
#![feature(nonzero)]
#![feature(optin_builtin_traits)]
#![feature(pattern)]
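The newly enabled `nonnull_cast` feature gates `NonNull::cast`, which the hunks above use as `ptr.cast::<u8>()`. A minimal standalone illustration (not part of the commit):

```rust
#![feature(nonnull_cast)]

use std::ptr::NonNull;

fn main() {
    let mut word: u32 = 0xdead_beef;
    let typed: NonNull<u32> = NonNull::from(&mut word);

    // cast() re-types the pointee; the address is unchanged.
    let bytes: NonNull<u8> = typed.cast::<u8>();
    assert_eq!(typed.as_ptr() as usize, bytes.as_ptr() as usize);
}
```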