Use NonNull in the allocator API #49608

Closed · wants to merge 1 commit
2 changes: 1 addition & 1 deletion src/dlmalloc
Submodule dlmalloc updated 2 files
+17 −16 src/global.rs
+21 −29 src/lib.rs
@@ -30,15 +30,16 @@ looks like:
 #![feature(global_allocator, allocator_api, heap_api)]
 
 use std::heap::{Alloc, System, Layout, AllocErr};
+use std::ptr::NonNull;
 
 struct MyAllocator;
 
 unsafe impl<'a> Alloc for &'a MyAllocator {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         System.dealloc(ptr, layout)
     }
 }
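For context on what the new signatures mean for callers: `alloc` now returns `Result<NonNull<u8>, AllocErr>` instead of `Result<*mut u8, AllocErr>`, so success already implies a non-null pointer. A minimal sketch of driving the changed API, assuming a nightly toolchain of this PR's era with the feature gates from the example above (the `main` function is illustrative, not part of the change):

```rust
#![feature(allocator_api, heap_api)]

use std::heap::{Alloc, Layout, System};
use std::ptr::NonNull;

fn main() {
    unsafe {
        let layout = Layout::new::<u64>();
        // Success now hands back NonNull<u8>; there is no separate null
        // check to write, only the AllocErr case to handle.
        let ptr: NonNull<u8> = System.alloc(layout.clone()).unwrap();
        System.dealloc(ptr, layout);
    }
}
```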
14 changes: 5 additions & 9 deletions src/liballoc/arc.rs
@@ -513,15 +513,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
         }
     }
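The replacement line leans on two methods: `NonNull::cast`, which re-types the pointer to `NonNull<u8>` without touching the non-null invariant, and `Layout::for_value`, here fed a reference from `as_ref` instead of a dereferenced raw pointer. A self-contained sketch of the same idiom, using today's stable `std::alloc` paths rather than the PR-era `std::heap` ones (standalone names, not the `Arc` internals):

```rust
use std::alloc::Layout;
use std::ptr::NonNull;

fn main() {
    let mut value: u64 = 42;
    // NonNull::from converts a reference without any runtime null check.
    let typed: NonNull<u64> = NonNull::from(&mut value);

    // cast changes only the pointee type, so no `as *mut u8` chain is
    // needed and the result is still statically non-null.
    let bytes: NonNull<u8> = typed.cast();

    // Layout::for_value works on a reference, avoiding an unsafe deref.
    let layout = Layout::for_value(&value);
    assert_eq!(layout.size(), std::mem::size_of::<u64>());
    assert_eq!(bytes.as_ptr(), &mut value as *mut u64 as *mut u8);
}
```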

@@ -559,7 +557,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|e| Heap.oom(e));
 
         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr()) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
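`mem.as_ptr()` goes the other direction: it unwraps the `NonNull` back to a raw `*mut u8` at the boundary where raw-pointer helpers like `set_data_ptr` actually need one. A small sketch of that round trip (names are illustrative):

```rust
use std::ptr::NonNull;

fn main() {
    let mut byte: u8 = 0;
    let non_null: NonNull<u8> = NonNull::from(&mut byte);

    // as_ptr unwraps to *mut u8 for APIs that still traffic in raw
    // pointers; the non-null guarantee is given up only at this boundary.
    let raw: *mut u8 = non_null.as_ptr();
    unsafe { *raw = 7 };
    assert_eq!(byte, 7);
}
```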
@@ -626,7 +624,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -656,7 +654,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
             let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
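`NonNull::new_unchecked` here asserts, without a branch, that `mem` is non-null; that is sound only because `mem` was just produced by a successful allocation. For contrast with the checked constructor, a standalone sketch (variable names are illustrative):

```rust
use std::ptr::NonNull;

fn main() {
    let mut x = 7i32;
    let raw: *mut i32 = &mut x;

    // Checked constructor: None for null, Some(NonNull) otherwise.
    assert!(NonNull::new(std::ptr::null_mut::<i32>()).is_none());
    let checked = NonNull::new(raw).expect("raw is non-null");

    // Unchecked constructor: the caller vouches for non-nullness, which
    // holds here because `raw` points at a live local.
    let unchecked = unsafe { NonNull::new_unchecked(raw) };
    assert_eq!(checked, unchecked);
}
```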
@@ -1148,8 +1146,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
@@ -1161,7 +1157,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
16 changes: 8 additions & 8 deletions src/liballoc/btree/node.rs
@@ -237,7 +237,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);
 
-        let top = self.node.ptr.as_ptr() as *mut u8;
+        let top = self.node.ptr;
 
         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -250,7 +250,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Heap.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
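`NonNull::from(top).cast()` first lifts `top` into a `NonNull` (the `From` impls cover references, and on nightly of that era `Unique` as well) and then re-types it to the byte pointer `dealloc` expects. A standalone sketch of that conversion chain, with a hypothetical node type standing in for `InternalNode<K, V>`:

```rust
use std::alloc::Layout;
use std::ptr::NonNull;

// Hypothetical stand-in for the B-tree's InternalNode<K, V>.
struct FakeNode {
    keys: [u32; 11],
}

fn main() {
    let mut node = FakeNode { keys: [0; 11] };

    // Reference -> NonNull<FakeNode> -> NonNull<u8>, no null checks anywhere.
    let untyped: NonNull<u8> = NonNull::from(&mut node).cast();

    // The layout a matching dealloc call would pass next to the pointer.
    let layout = Layout::new::<FakeNode>();
    assert_eq!(layout.size(), std::mem::size_of::<FakeNode>());
    let _ = untyped;
}
```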
@@ -434,9 +434,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
             marker::Edge
         >
     > {
-        let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Heap.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -455,9 +455,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
             marker::Edge
         >
     > {
-        let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Heap.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1240,12 +1240,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }
 
             Heap.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.cast(),
                 Layout::new::<InternalNode<K, V>>(),
             );
         } else {
             Heap.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.cast(),
                 Layout::new::<LeafNode<K, V>>(),
             );
         }