From fddf51ee0b9765484fc316dbf3d4feb8ceea715d Mon Sep 17 00:00:00 2001 From: Mike Hommey Date: Tue, 3 Apr 2018 08:51:02 +0900 Subject: [PATCH] Use NonNull instead of *mut u8 in the Alloc trait Fixes #49608 --- src/doc/nomicon | 2 +- .../src/language-features/global-allocator.md | 1 + src/liballoc/alloc.rs | 19 +++--- src/liballoc/arc.rs | 16 ++--- src/liballoc/btree/node.rs | 16 ++--- src/liballoc/heap.rs | 22 +++++-- src/liballoc/lib.rs | 1 + src/liballoc/raw_vec.rs | 40 ++++++------- src/liballoc/rc.rs | 18 +++--- src/liballoc/tests/heap.rs | 3 +- src/liballoc_system/lib.rs | 29 +++++----- src/libcore/alloc.rs | 58 ++++++++----------- src/libcore/ptr.rs | 8 +++ src/libstd/collections/hash/table.rs | 6 +- src/libstd/lib.rs | 1 + src/test/run-pass/allocator/xcrate-use2.rs | 2 +- src/test/run-pass/realloc-16687.rs | 18 +++--- src/test/run-pass/regions-mock-trans.rs | 5 +- 18 files changed, 136 insertions(+), 129 deletions(-) diff --git a/src/doc/nomicon b/src/doc/nomicon index 6a8f0a27e9a58..498ac2997420f 160000 --- a/src/doc/nomicon +++ b/src/doc/nomicon @@ -1 +1 @@ -Subproject commit 6a8f0a27e9a58c55c89d07bc43a176fdae5e051c +Subproject commit 498ac2997420f7b25f7cd0a3f8202950d8ad93ec diff --git a/src/doc/unstable-book/src/language-features/global-allocator.md b/src/doc/unstable-book/src/language-features/global-allocator.md index 6ce12ba684ddc..a3f3ee65bf014 100644 --- a/src/doc/unstable-book/src/language-features/global-allocator.md +++ b/src/doc/unstable-book/src/language-features/global-allocator.md @@ -30,6 +30,7 @@ looks like: #![feature(global_allocator, allocator_api, heap_api)] use std::alloc::{GlobalAlloc, System, Layout, Void}; +use std::ptr::NonNull; struct MyAllocator; diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 063f0543ec4a6..af48aa7961e06 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -16,6 +16,7 @@ issue = "32838")] use core::intrinsics::{min_align_of_val, size_of_val}; +use core::ptr::NonNull; use core::usize; #[doc(inline)] @@ -120,27 +121,27 @@ unsafe impl GlobalAlloc for Global { unsafe impl Alloc for Global { #[inline] - unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { GlobalAlloc::alloc(self, layout).into() } #[inline] - unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) { - GlobalAlloc::dealloc(self, ptr as *mut Void, layout) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) } #[inline] unsafe fn realloc(&mut self, - ptr: *mut u8, + ptr: NonNull, layout: Layout, new_size: usize) - -> Result<*mut u8, AllocErr> + -> Result, AllocErr> { - GlobalAlloc::realloc(self, ptr as *mut Void, layout, new_size).into() + GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size).into() } #[inline] - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { GlobalAlloc::alloc_zeroed(self, layout).into() } @@ -195,8 +196,8 @@ mod tests { let ptr = Global.alloc_zeroed(layout.clone()) .unwrap_or_else(|_| Global.oom()); - let end = ptr.offset(layout.size() as isize); - let mut i = ptr; + let mut i = ptr.cast::().as_ptr(); + let end = i.offset(layout.size() as isize); while i < end { assert_eq!(*i, 0); i = i.offset(1); diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index f0a325530ba1e..88754ace3ce2c 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -512,15 +512,13 @@ impl 
Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);

         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
         }
     }

@@ -558,7 +556,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|_| Global.oom());

         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;

         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));

@@ -625,7 +623,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -639,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);

-                    Global.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem.as_void(), self.layout.clone());
                 }
             }
         }
@@ -655,7 +653,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         let elems = &mut (*ptr).data as *mut [T] as *mut T;

         let mut guard = Guard{
-            mem: mem,
+            mem: NonNull::new_unchecked(mem),
             elems: elems,
             layout: layout,
             n_elems: 0,
@@ -1147,8 +1145,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely.
See the discussion in Arc::drop() about // the memory orderings @@ -1160,7 +1156,7 @@ impl Drop for Weak { if self.inner().weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); unsafe { - Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr)) + Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref())) } } } diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs index 8e23228bd28fe..64aa40ac166e6 100644 --- a/src/liballoc/btree/node.rs +++ b/src/liballoc/btree/node.rs @@ -236,7 +236,7 @@ impl Root { pub fn pop_level(&mut self) { debug_assert!(self.height > 0); - let top = self.node.ptr.as_ptr() as *mut u8; + let top = self.node.ptr; self.node = unsafe { BoxedNode::from_ptr(self.as_mut() @@ -249,7 +249,7 @@ impl Root { self.as_mut().as_leaf_mut().parent = ptr::null(); unsafe { - Global.dealloc(top, Layout::new::>()); + Global.dealloc(NonNull::from(top).as_void(), Layout::new::>()); } } } @@ -433,9 +433,9 @@ impl NodeRef { marker::Edge > > { - let ptr = self.as_leaf() as *const LeafNode as *const u8 as *mut u8; + let node = self.node; let ret = self.ascend().ok(); - Global.dealloc(ptr, Layout::new::>()); + Global.dealloc(node.as_void(), Layout::new::>()); ret } } @@ -454,9 +454,9 @@ impl NodeRef { marker::Edge > > { - let ptr = self.as_internal() as *const InternalNode as *const u8 as *mut u8; + let node = self.node; let ret = self.ascend().ok(); - Global.dealloc(ptr, Layout::new::>()); + Global.dealloc(node.as_void(), Layout::new::>()); ret } } @@ -1239,12 +1239,12 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: } Global.dealloc( - right_node.node.as_ptr() as *mut u8, + right_node.node.as_void(), Layout::new::>(), ); } else { Global.dealloc( - right_node.node.as_ptr() as *mut u8, + right_node.node.as_void(), Layout::new::>(), ); } diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs index e79383331e180..cfb6504e743a1 100644 --- a/src/liballoc/heap.rs +++ b/src/liballoc/heap.rs @@ -8,14 +8,20 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
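The `Arc`, `Weak`, and btree hunks above all land on the same pattern: keep the typed `NonNull<T>`, and only convert at the allocator boundary with `as_void()` plus `Layout::for_value(ptr.as_ref())`. A minimal sketch of that pattern outside the standard library, assuming the unstable `allocator_api`/`nonnull_cast` features and the `std::alloc` re-exports as of this patch (`allocate_value`/`drop_and_free` are illustrative names, not part of the trait):

```rust
#![feature(allocator_api, nonnull_cast)]

use std::alloc::{Alloc, Global, Layout};
use std::ptr::NonNull;

// Allocate a single value on the heap; assumes T is not zero-sized.
unsafe fn allocate_value<T>(value: T) -> NonNull<T> {
    // Global.alloc now hands back NonNull<Void>; cast() turns it into NonNull<T>.
    let ptr = Global.alloc(Layout::new::<T>())
        .unwrap_or_else(|_| Global.oom())
        .cast::<T>();
    std::ptr::write(ptr.as_ptr(), value);
    ptr
}

unsafe fn drop_and_free<T>(ptr: NonNull<T>) {
    // Take the layout while the value is still live, as Arc::drop_slow does.
    let layout = Layout::for_value(ptr.as_ref());
    std::ptr::drop_in_place(ptr.as_ptr());
    // as_void() (added by this patch) yields the NonNull<Void> that dealloc expects.
    Global.dealloc(ptr.as_void(), layout);
}

fn main() {
    unsafe {
        let p = allocate_value(42u64);
        assert_eq!(*p.as_ref(), 42);
        drop_and_free(p);
    }
}
```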
-pub use alloc::{Excess, Layout, AllocErr, CannotReallocInPlace}; +#![allow(deprecated)] + +pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Void}; use core::alloc::Alloc as CoreAlloc; +use core::ptr::NonNull; #[doc(hidden)] pub mod __core { pub use core::*; } +#[derive(Debug)] +pub struct Excess(pub *mut u8, pub usize); + /// Compatibility with older versions of #[global_allocator] during bootstrap pub unsafe trait Alloc { unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>; @@ -42,13 +48,13 @@ pub unsafe trait Alloc { new_layout: Layout) -> Result<(), CannotReallocInPlace>; } -#[allow(deprecated)] unsafe impl Alloc for T where T: CoreAlloc { unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { - CoreAlloc::alloc(self, layout) + CoreAlloc::alloc(self, layout).map(|ptr| ptr.cast().as_ptr()) } unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) { + let ptr = NonNull::new_unchecked(ptr as *mut Void); CoreAlloc::dealloc(self, ptr, layout) } @@ -64,28 +70,33 @@ unsafe impl Alloc for T where T: CoreAlloc { ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<*mut u8, AllocErr> { - CoreAlloc::realloc(self, ptr, layout, new_layout.size()) + let ptr = NonNull::new_unchecked(ptr as *mut Void); + CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr()) } unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { - CoreAlloc::alloc_zeroed(self, layout) + CoreAlloc::alloc_zeroed(self, layout).map(|ptr| ptr.cast().as_ptr()) } unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { CoreAlloc::alloc_excess(self, layout) + .map(|e| Excess(e.0 .cast().as_ptr(), e.1)) } unsafe fn realloc_excess(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result { + let ptr = NonNull::new_unchecked(ptr as *mut Void); CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size()) + .map(|e| Excess(e.0 .cast().as_ptr(), e.1)) } unsafe fn grow_in_place(&mut self, ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<(), CannotReallocInPlace> { + let ptr = NonNull::new_unchecked(ptr as *mut Void); CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size()) } @@ -93,6 +104,7 @@ unsafe impl Alloc for T where T: CoreAlloc { ptr: *mut u8, layout: Layout, new_layout: Layout) -> Result<(), CannotReallocInPlace> { + let ptr = NonNull::new_unchecked(ptr as *mut Void); CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size()) } } diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index a10820ebefd00..3a106a2ff5c3d 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -99,6 +99,7 @@ #![feature(lang_items)] #![feature(libc)] #![feature(needs_allocator)] +#![feature(nonnull_cast)] #![feature(nonzero)] #![feature(optin_builtin_traits)] #![feature(pattern)] diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 80b816878fb37..d72301f5ad640 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -12,7 +12,7 @@ use alloc::{Alloc, Layout, Global}; use core::cmp; use core::mem; use core::ops::Drop; -use core::ptr::{self, Unique}; +use core::ptr::{self, NonNull, Unique}; use core::slice; use super::boxed::Box; use super::allocator::CollectionAllocErr; @@ -90,7 +90,7 @@ impl RawVec { // handles ZSTs and `cap = 0` alike let ptr = if alloc_size == 0 { - mem::align_of::() as *mut u8 + NonNull::::dangling().as_void() } else { let align = mem::align_of::(); let result = if zeroed { @@ -105,7 +105,7 @@ impl RawVec { }; RawVec { - ptr: Unique::new_unchecked(ptr 
as *mut _), + ptr: ptr.cast().into(), cap, a, } @@ -310,11 +310,11 @@ impl RawVec { let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).expect("capacity overflow"); - let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8, + let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_void(), cur, new_size); match ptr_res { - Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)), + Ok(ptr) => (new_cap, ptr.cast().into()), Err(_) => self.a.oom(), } } @@ -369,8 +369,7 @@ impl RawVec { let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).expect("capacity overflow"); - let ptr = self.ptr() as *mut _; - match self.a.grow_in_place(ptr, old_layout, new_size) { + match self.a.grow_in_place(NonNull::from(self.ptr).as_void(), old_layout, new_size) { Ok(_) => { // We can't directly divide `size`. self.cap = new_cap; @@ -427,13 +426,12 @@ impl RawVec { let res = match self.current_layout() { Some(layout) => { debug_assert!(new_layout.align() == layout.align()); - let old_ptr = self.ptr.as_ptr() as *mut u8; - self.a.realloc(old_ptr, layout, new_layout.size()) + self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size()) } None => self.a.alloc(new_layout), }; - self.ptr = Unique::new_unchecked(res? as *mut T); + self.ptr = res?.cast().into(); self.cap = new_cap; Ok(()) @@ -537,13 +535,12 @@ impl RawVec { let res = match self.current_layout() { Some(layout) => { debug_assert!(new_layout.align() == layout.align()); - let old_ptr = self.ptr.as_ptr() as *mut u8; - self.a.realloc(old_ptr, layout, new_layout.size()) + self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size()) } None => self.a.alloc(new_layout), }; - self.ptr = Unique::new_unchecked(res? as *mut T); + self.ptr = res?.cast().into(); self.cap = new_cap; Ok(()) @@ -600,11 +597,12 @@ impl RawVec { // (regardless of whether `self.cap - used_cap` wrapped). // Therefore we can safely call grow_in_place. - let ptr = self.ptr() as *mut _; let new_layout = Layout::new::().repeat(new_cap).unwrap().0; // FIXME: may crash and burn on over-reserve alloc_guard(new_layout.size()).expect("capacity overflow"); - match self.a.grow_in_place(ptr, old_layout, new_layout.size()) { + match self.a.grow_in_place( + NonNull::from(self.ptr).as_void(), old_layout, new_layout.size(), + ) { Ok(_) => { self.cap = new_cap; true @@ -664,10 +662,10 @@ impl RawVec { let new_size = elem_size * amount; let align = mem::align_of::(); let old_layout = Layout::from_size_align_unchecked(old_size, align); - match self.a.realloc(self.ptr.as_ptr() as *mut u8, + match self.a.realloc(NonNull::from(self.ptr).as_void(), old_layout, new_size) { - Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T), + Ok(p) => self.ptr = p.cast().into(), Err(_) => self.a.oom(), } } @@ -700,8 +698,7 @@ impl RawVec { let elem_size = mem::size_of::(); if elem_size != 0 { if let Some(layout) = self.current_layout() { - let ptr = self.ptr() as *mut u8; - self.a.dealloc(ptr, layout); + self.a.dealloc(NonNull::from(self.ptr).as_void(), layout); } } } @@ -737,6 +734,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { #[cfg(test)] mod tests { use super::*; + use alloc::Void; #[test] fn allocator_param() { @@ -756,7 +754,7 @@ mod tests { // before allocation attempts start failing. 
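The `RawVec` hunks above reduce to three idioms: `NonNull::dangling()` for the capacity-zero case, `cast()` (or `cast().into()` for `Unique<T>`) to store the allocator's `NonNull<Void>` back into the typed field, and `as_void()` whenever the pointer goes back to the allocator. A compressed sketch of the same idioms in a standalone type; `TinyBuf` is a hypothetical miniature of `RawVec` that keeps a plain `NonNull<T>`, under the same unstable features:

```rust
#![feature(allocator_api, nonnull_cast)]

use std::alloc::{Alloc, Global, Layout};
use std::mem;
use std::ptr::NonNull;

// Hypothetical miniature of RawVec: owns an allocation of `cap` Ts, but does
// not track or drop the elements themselves.
struct TinyBuf<T> {
    ptr: NonNull<T>,
    cap: usize,
}

impl<T> TinyBuf<T> {
    fn new() -> Self {
        // cap == 0 never touches the allocator; a dangling pointer stands in,
        // just like `NonNull::<T>::dangling().as_void()` in RawVec::allocate_in.
        TinyBuf { ptr: NonNull::dangling(), cap: 0 }
    }

    fn grow_to(&mut self, new_cap: usize) {
        assert!(mem::size_of::<T>() != 0 && new_cap > self.cap);
        unsafe {
            let new_layout = Layout::array::<T>(new_cap).unwrap();
            let result = if self.cap == 0 {
                Global.alloc(new_layout)
            } else {
                let old_layout = Layout::array::<T>(self.cap).unwrap();
                // realloc takes and returns NonNull<Void>; as_void()/cast() convert.
                Global.realloc(self.ptr.as_void(), old_layout, new_layout.size())
            };
            self.ptr = result.unwrap_or_else(|_| Global.oom()).cast();
            self.cap = new_cap;
        }
    }
}

impl<T> Drop for TinyBuf<T> {
    fn drop(&mut self) {
        if mem::size_of::<T>() != 0 && self.cap != 0 {
            unsafe {
                let layout = Layout::array::<T>(self.cap).unwrap();
                Global.dealloc(self.ptr.as_void(), layout);
            }
        }
    }
}
```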
struct BoundedAlloc { fuel: usize } unsafe impl Alloc for BoundedAlloc { - unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { let size = layout.size(); if size > self.fuel { return Err(AllocErr); @@ -766,7 +764,7 @@ mod tests { err @ Err(_) => err, } } - unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) { + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { Global.dealloc(ptr, layout) } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 3c0b11bfe747f..1c835fe50decb 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -259,7 +259,7 @@ use core::ops::CoerceUnsized; use core::ptr::{self, NonNull}; use core::convert::From; -use alloc::{Global, Alloc, Layout, box_free}; +use alloc::{Global, Alloc, Layout, Void, box_free}; use string::String; use vec::Vec; @@ -671,7 +671,7 @@ impl Rc { .unwrap_or_else(|_| Global.oom()); // Initialize the real RcBox - let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox; + let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox; ptr::write(&mut (*inner).strong, Cell::new(1)); ptr::write(&mut (*inner).weak, Cell::new(1)); @@ -737,7 +737,7 @@ impl RcFromSlice for Rc<[T]> { // In the event of a panic, elements that have been written // into the new RcBox will be dropped, then the memory freed. struct Guard { - mem: *mut u8, + mem: NonNull, elems: *mut T, layout: Layout, n_elems: usize, @@ -760,14 +760,14 @@ impl RcFromSlice for Rc<[T]> { let v_ptr = v as *const [T]; let ptr = Self::allocate_for_ptr(v_ptr); - let mem = ptr as *mut _ as *mut u8; + let mem = ptr as *mut _ as *mut Void; let layout = Layout::for_value(&*ptr); // Pointer to first element let elems = &mut (*ptr).value as *mut [T] as *mut T; let mut guard = Guard{ - mem: mem, + mem: NonNull::new_unchecked(mem), elems: elems, layout: layout, n_elems: 0, @@ -834,8 +834,6 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { /// ``` fn drop(&mut self) { unsafe { - let ptr = self.ptr.as_ptr(); - self.dec_strong(); if self.strong() == 0 { // destroy the contained object @@ -846,7 +844,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.dec_weak(); if self.weak() == 0 { - Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr)); + Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref())); } } } @@ -1266,13 +1264,11 @@ impl Drop for Weak { /// ``` fn drop(&mut self) { unsafe { - let ptr = self.ptr.as_ptr(); - self.dec_weak(); // the weak count starts at 1, and will only go to zero if all // the strong pointers have disappeared. 
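The `BoundedAlloc` test allocator above also works as a template for any wrapper allocator under the new signatures: `alloc` and `dealloc` now traffic in `NonNull<Void>`, so a wrapper can forward the value untouched. A sketch of a hypothetical counting wrapper in the same style, again assuming the unstable `allocator_api` surface of this nightly:

```rust
#![feature(allocator_api)]

use std::alloc::{Alloc, AllocErr, Global, Layout, Void};
use std::ptr::NonNull;

// Hypothetical wrapper that counts live allocations while delegating to Global.
struct CountingAlloc {
    live: usize,
}

unsafe impl Alloc for CountingAlloc {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
        let ptr = Global.alloc(layout)?;
        self.live += 1;
        Ok(ptr)
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
        self.live -= 1;
        Global.dealloc(ptr, layout)
    }
}

fn main() {
    let mut a = CountingAlloc { live: 0 };
    unsafe {
        let layout = Layout::from_size_align(64, 8).unwrap();
        let p = a.alloc(layout.clone()).unwrap_or_else(|_| a.oom());
        assert_eq!(a.live, 1);
        a.dealloc(p, layout);
        assert_eq!(a.live, 0);
    }
}
```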
if self.weak() == 0 { - Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr)); + Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref())); } } } diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 328131e2fef7a..6fa88ce969a0e 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -34,7 +34,8 @@ fn check_overalign_requests(mut allocator: T) { allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap() }).collect(); for &ptr in &pointers { - assert_eq!((ptr as usize) % align, 0, "Got a pointer less aligned than requested") + assert_eq!((ptr.as_ptr() as usize) % align, 0, + "Got a pointer less aligned than requested") } // Clean up diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs index c6507282b244d..bf27e972177ce 100644 --- a/src/liballoc_system/lib.rs +++ b/src/liballoc_system/lib.rs @@ -42,6 +42,7 @@ const MIN_ALIGN: usize = 8; const MIN_ALIGN: usize = 16; use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Void}; +use core::ptr::NonNull; #[unstable(feature = "allocator_api", issue = "32838")] pub struct System; @@ -49,26 +50,26 @@ pub struct System; #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Alloc for System { #[inline] - unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { GlobalAlloc::alloc(self, layout).into() } #[inline] - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { GlobalAlloc::alloc_zeroed(self, layout).into() } #[inline] - unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) { - GlobalAlloc::dealloc(self, ptr as *mut Void, layout) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) } #[inline] unsafe fn realloc(&mut self, - ptr: *mut u8, + ptr: NonNull, old_layout: Layout, - new_size: usize) -> Result<*mut u8, AllocErr> { - GlobalAlloc::realloc(self, ptr as *mut Void, old_layout, new_size).into() + new_size: usize) -> Result, AllocErr> { + GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size).into() } #[inline] @@ -81,26 +82,26 @@ unsafe impl Alloc for System { #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl<'a> Alloc for &'a System { #[inline] - unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { GlobalAlloc::alloc(*self, layout).into() } #[inline] - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { GlobalAlloc::alloc_zeroed(*self, layout).into() } #[inline] - unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) { - GlobalAlloc::dealloc(*self, ptr as *mut Void, layout) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + GlobalAlloc::dealloc(*self, ptr.as_ptr(), layout) } #[inline] unsafe fn realloc(&mut self, - ptr: *mut u8, + ptr: NonNull, old_layout: Layout, - new_size: usize) -> Result<*mut u8, AllocErr> { - GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size).into() + new_size: usize) -> Result, AllocErr> { + GlobalAlloc::realloc(*self, ptr.as_ptr(), old_layout, new_size).into() } #[inline] diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs index 7334f986f2baa..632eed960492c 100644 --- a/src/libcore/alloc.rs +++ 
b/src/libcore/alloc.rs @@ -42,21 +42,17 @@ impl Void { } /// Convert from a return value of GlobalAlloc::alloc to that of Alloc::alloc -impl From<*mut Void> for Result<*mut u8, AllocErr> { +impl From<*mut Void> for Result, AllocErr> { fn from(ptr: *mut Void) -> Self { - if !ptr.is_null() { - Ok(ptr as *mut u8) - } else { - Err(AllocErr) - } + NonNull::new(ptr).ok_or(AllocErr) } } /// Convert from a return value of Alloc::alloc to that of GlobalAlloc::alloc -impl From> for *mut Void { - fn from(result: Result<*mut u8, AllocErr>) -> Self { +impl From, AllocErr>> for *mut Void { + fn from(result: Result, AllocErr>) -> Self { match result { - Ok(ptr) => ptr as *mut Void, + Ok(ptr) => ptr.as_ptr(), Err(_) => Void::null_mut(), } } @@ -65,7 +61,7 @@ impl From> for *mut Void { /// Represents the combination of a starting address and /// a total capacity of the returned block. #[derive(Debug)] -pub struct Excess(pub *mut u8, pub usize); +pub struct Excess(pub NonNull, pub usize); fn size_align() -> (usize, usize) { (mem::size_of::(), mem::align_of::()) @@ -575,7 +571,7 @@ pub unsafe trait Alloc { /// Clients wishing to abort computation in response to an /// allocation error are encouraged to call the allocator's `oom` /// method, rather than directly invoking `panic!` or similar. - unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>; + unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr>; /// Deallocate the memory referenced by `ptr`. /// @@ -592,7 +588,7 @@ pub unsafe trait Alloc { /// * In addition to fitting the block of memory `layout`, the /// alignment of the `layout` must match the alignment used /// to allocate that block of memory. - unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout); + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); /// Allocator-specific method for signaling an out-of-memory /// condition. @@ -710,9 +706,9 @@ pub unsafe trait Alloc { /// reallocation error are encouraged to call the allocator's `oom` /// method, rather than directly invoking `panic!` or similar. unsafe fn realloc(&mut self, - ptr: *mut u8, + ptr: NonNull, layout: Layout, - new_size: usize) -> Result<*mut u8, AllocErr> { + new_size: usize) -> Result, AllocErr> { let old_size = layout.size(); if new_size >= old_size { @@ -729,7 +725,9 @@ pub unsafe trait Alloc { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let result = self.alloc(new_layout); if let Ok(new_ptr) = result { - ptr::copy_nonoverlapping(ptr as *const u8, new_ptr, cmp::min(old_size, new_size)); + ptr::copy_nonoverlapping(ptr.as_ptr() as *const u8, + new_ptr.as_ptr() as *mut u8, + cmp::min(old_size, new_size)); self.dealloc(ptr, layout); } result @@ -751,11 +749,11 @@ pub unsafe trait Alloc { /// Clients wishing to abort computation in response to an /// allocation error are encouraged to call the allocator's `oom` /// method, rather than directly invoking `panic!` or similar. - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> { + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { let size = layout.size(); let p = self.alloc(layout); if let Ok(p) = p { - ptr::write_bytes(p, 0, size); + ptr::write_bytes(p.as_ptr() as *mut u8, 0, size); } p } @@ -800,7 +798,7 @@ pub unsafe trait Alloc { /// reallocation error are encouraged to call the allocator's `oom` /// method, rather than directly invoking `panic!` or similar. 
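The two `From` impls in this file are the entire bridge between the raw-pointer `GlobalAlloc` world and the `NonNull` `Alloc` world: a null return becomes `Err(AllocErr)`, and an error becomes `Void::null_mut()` on the way back. A sketch of how an `Alloc` written on top of a `GlobalAlloc` uses the first conversion; `MySystem` is a hypothetical newtype over `System`, and only the unstable `allocator_api` exports shown in this patch are assumed:

```rust
#![feature(allocator_api)]

use std::alloc::{Alloc, AllocErr, GlobalAlloc, Layout, System, Void};
use std::ptr::NonNull;

// Hypothetical newtype that delegates the raw interface to System.
struct MySystem;

unsafe impl GlobalAlloc for MySystem {
    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
        GlobalAlloc::alloc(&System, layout)
    }

    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
        GlobalAlloc::dealloc(&System, ptr, layout)
    }
}

unsafe impl Alloc for MySystem {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
        // `.into()` is `From<*mut Void> for Result<NonNull<Void>, AllocErr>`,
        // i.e. NonNull::new(ptr).ok_or(AllocErr).
        GlobalAlloc::alloc(self, layout).into()
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
        // Going the other way only needs the raw pointer back out.
        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
    }
}

fn main() {
    unsafe {
        let layout = Layout::from_size_align(32, 8).unwrap();
        let mut a = MySystem;
        let p = Alloc::alloc(&mut a, layout.clone()).unwrap();
        Alloc::dealloc(&mut a, p, layout);
    }
}
```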
unsafe fn realloc_excess(&mut self, - ptr: *mut u8, + ptr: NonNull, layout: Layout, new_size: usize) -> Result { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); @@ -845,7 +843,7 @@ pub unsafe trait Alloc { /// `grow_in_place` failures without aborting, or to fall back on /// another reallocation method before resorting to an abort. unsafe fn grow_in_place(&mut self, - ptr: *mut u8, + ptr: NonNull, layout: Layout, new_size: usize) -> Result<(), CannotReallocInPlace> { let _ = ptr; // this default implementation doesn't care about the actual address. @@ -900,7 +898,7 @@ pub unsafe trait Alloc { /// `shrink_in_place` failures without aborting, or to fall back /// on another reallocation method before resorting to an abort. unsafe fn shrink_in_place(&mut self, - ptr: *mut u8, + ptr: NonNull, layout: Layout, new_size: usize) -> Result<(), CannotReallocInPlace> { let _ = ptr; // this default implementation doesn't care about the actual address. @@ -951,7 +949,7 @@ pub unsafe trait Alloc { { let k = Layout::new::(); if k.size() > 0 { - unsafe { self.alloc(k).map(|p| NonNull::new_unchecked(p as *mut T)) } + unsafe { self.alloc(k).map(|p| p.cast()) } } else { Err(AllocErr) } @@ -977,10 +975,9 @@ pub unsafe trait Alloc { unsafe fn dealloc_one(&mut self, ptr: NonNull) where Self: Sized { - let raw_ptr = ptr.as_ptr() as *mut u8; let k = Layout::new::(); if k.size() > 0 { - self.dealloc(raw_ptr, k); + self.dealloc(ptr.as_void(), k); } } @@ -1020,10 +1017,7 @@ pub unsafe trait Alloc { match Layout::array::(n) { Ok(ref layout) if layout.size() > 0 => { unsafe { - self.alloc(layout.clone()) - .map(|p| { - NonNull::new_unchecked(p as *mut T) - }) + self.alloc(layout.clone()).map(|p| p.cast()) } } _ => Err(AllocErr), @@ -1068,11 +1062,10 @@ pub unsafe trait Alloc { n_new: usize) -> Result, AllocErr> where Self: Sized { - match (Layout::array::(n_old), Layout::array::(n_new), ptr.as_ptr()) { - (Ok(ref k_old), Ok(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => { + match (Layout::array::(n_old), Layout::array::(n_new)) { + (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { debug_assert!(k_old.align() == k_new.align()); - self.realloc(ptr as *mut u8, k_old.clone(), k_new.size()) - .map(|p| NonNull::new_unchecked(p as *mut T)) + self.realloc(ptr.as_void(), k_old.clone(), k_new.size()).map(NonNull::cast) } _ => { Err(AllocErr) @@ -1103,10 +1096,9 @@ pub unsafe trait Alloc { unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), AllocErr> where Self: Sized { - let raw_ptr = ptr.as_ptr() as *mut u8; match Layout::array::(n) { Ok(ref k) if k.size() > 0 => { - Ok(self.dealloc(raw_ptr, k.clone())) + Ok(self.dealloc(ptr.as_void(), k.clone())) } _ => { Err(AllocErr) diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index c1e150e9fb909..f4e668328ce97 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -2750,6 +2750,14 @@ impl NonNull { NonNull::new_unchecked(self.as_ptr() as *mut U) } } + + /// Cast to a `Void` pointer + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn as_void(self) -> NonNull<::alloc::Void> { + unsafe { + NonNull::new_unchecked(self.as_ptr() as _) + } + } } #[stable(feature = "nonnull", since = "1.25.0")] diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs index 502637051434e..38c993737880e 100644 --- a/src/libstd/collections/hash/table.rs +++ b/src/libstd/collections/hash/table.rs @@ -757,12 +757,10 @@ impl RawTable { let buffer = 
Global.alloc(Layout::from_size_align(size, alignment) .map_err(|_| CollectionAllocErr::CapacityOverflow)?)?; - let hashes = buffer as *mut HashUint; - Ok(RawTable { capacity_mask: capacity.wrapping_sub(1), size: 0, - hashes: TaggedHashUintPtr::new(hashes), + hashes: TaggedHashUintPtr::new(buffer.cast().as_ptr()), marker: marker::PhantomData, }) } @@ -1185,7 +1183,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable { debug_assert!(!oflo, "should be impossible"); unsafe { - Global.dealloc(self.hashes.ptr() as *mut u8, + Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_void(), Layout::from_size_align(size, align).unwrap()); // Remember how everything was allocated out of one buffer // during initialization? We only need one call to free here. diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index 25ba75fd35eb8..a34fcb5a7f98b 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -275,6 +275,7 @@ #![feature(macro_reexport)] #![feature(macro_vis_matcher)] #![feature(needs_panic_runtime)] +#![feature(nonnull_cast)] #![feature(exhaustive_patterns)] #![feature(nonzero)] #![feature(num_bits_bytes)] diff --git a/src/test/run-pass/allocator/xcrate-use2.rs b/src/test/run-pass/allocator/xcrate-use2.rs index 52eb963efdb74..b8e844522dc8b 100644 --- a/src/test/run-pass/allocator/xcrate-use2.rs +++ b/src/test/run-pass/allocator/xcrate-use2.rs @@ -30,7 +30,7 @@ fn main() { let layout = Layout::from_size_align(4, 2).unwrap(); // Global allocator routes to the `custom_as_global` global - let ptr = Global.alloc(layout.clone()).unwrap(); + let ptr = Global.alloc(layout.clone()); helper::work_with(&ptr); assert_eq!(custom_as_global::get(), n + 1); Global.dealloc(ptr, layout.clone()); diff --git a/src/test/run-pass/realloc-16687.rs b/src/test/run-pass/realloc-16687.rs index a562165d21b7d..49ab0ee33109d 100644 --- a/src/test/run-pass/realloc-16687.rs +++ b/src/test/run-pass/realloc-16687.rs @@ -13,10 +13,10 @@ // Ideally this would be revised to use no_std, but for now it serves // well enough to reproduce (and illustrate) the bug from #16687. 
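The hash-table change above and the realloc-16687 test below show the migration recipe for code that still wants raw `*mut u8` at its own boundaries: `cast::<u8>().as_ptr()` on the way out of the allocator, `NonNull::new_unchecked(ptr).as_void()` on the way back in. Condensed into two standalone helpers (hypothetical names, same unstable features):

```rust
#![feature(allocator_api, nonnull_cast)]

use std::alloc::{Alloc, Global, Layout};
use std::ptr::NonNull;

// Old-style helper that still hands out *mut u8, bridged to the new API.
unsafe fn allocate(layout: Layout) -> *mut u8 {
    Global.alloc(layout)
        .unwrap_or_else(|_| Global.oom())
        .cast::<u8>()
        .as_ptr()
}

unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
    // The caller promises `ptr` came from `allocate`, so it is non-null.
    Global.dealloc(NonNull::new_unchecked(ptr).as_void(), layout);
}

fn main() {
    unsafe {
        let layout = Layout::from_size_align(16, 8).unwrap();
        let p = allocate(layout.clone());
        assert!(!p.is_null());
        deallocate(p, layout);
    }
}
```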
-#![feature(heap_api, allocator_api)] +#![feature(heap_api, allocator_api, nonnull_cast)] -use std::heap::{Heap, Alloc, Layout}; -use std::ptr; +use std::alloc::{Global, Alloc, Layout}; +use std::ptr::{self, NonNull}; fn main() { unsafe { @@ -50,13 +50,13 @@ unsafe fn test_triangle() -> bool { println!("allocate({:?})", layout); } - let ret = Heap.alloc(layout.clone()).unwrap_or_else(|_| Heap.oom()); + let ret = Global.alloc(layout.clone()).unwrap_or_else(|_| Global.oom()); if PRINT { println!("allocate({:?}) = {:?}", layout, ret); } - ret + ret.cast().as_ptr() } unsafe fn deallocate(ptr: *mut u8, layout: Layout) { @@ -64,7 +64,7 @@ unsafe fn test_triangle() -> bool { println!("deallocate({:?}, {:?}", ptr, layout); } - Heap.dealloc(ptr, layout); + Global.dealloc(NonNull::new_unchecked(ptr).as_void(), layout); } unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 { @@ -72,14 +72,14 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let ret = Heap.realloc(ptr, old.clone(), new.clone()) - .unwrap_or_else(|_| Heap.oom()); + let ret = Global.realloc(NonNull::new_unchecked(ptr).as_void(), old.clone(), new.size()) + .unwrap_or_else(|_| Global.oom()); if PRINT { println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, ret); } - ret + ret.cast().as_ptr() } fn idx_to_size(i: usize) -> usize { (i+1) * 10 } diff --git a/src/test/run-pass/regions-mock-trans.rs b/src/test/run-pass/regions-mock-trans.rs index 7d34b8fd00fc8..3c37243c8b938 100644 --- a/src/test/run-pass/regions-mock-trans.rs +++ b/src/test/run-pass/regions-mock-trans.rs @@ -13,6 +13,7 @@ #![feature(allocator_api)] use std::heap::{Alloc, Heap, Layout}; +use std::ptr::NonNull; struct arena(()); @@ -33,7 +34,7 @@ fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> { unsafe { let ptr = Heap.alloc(Layout::new::()) .unwrap_or_else(|_| Heap.oom()); - &*(ptr as *const _) + &*(ptr.as_ptr() as *const _) } } @@ -45,7 +46,7 @@ fn g(fcx : &Fcx) { let bcx = Bcx { fcx: fcx }; let bcx2 = h(&bcx); unsafe { - Heap.dealloc(bcx2 as *const _ as *mut _, Layout::new::()); + Heap.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::()); } }
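Taken together, the typed convenience methods on the trait (`alloc_one`, `dealloc_one`, `alloc_array`, `realloc_array`, `dealloc_array`) now stay in `NonNull<T>` end to end, with the `*mut u8` round-trips gone from their default bodies. A small usage sketch of the array helpers, assuming the unstable `allocator_api` feature and the `std::alloc::Global` re-export used by the updated tests:

```rust
#![feature(allocator_api)]

use std::alloc::{Alloc, Global};
use std::ptr::{self, NonNull};

fn main() {
    unsafe {
        let n = 8usize;
        // alloc_array returns NonNull<u32> directly; no manual cast needed.
        let arr: NonNull<u32> = Global.alloc_array::<u32>(n)
            .unwrap_or_else(|_| Global.oom());
        for i in 0..n {
            ptr::write(arr.as_ptr().offset(i as isize), i as u32);
        }
        assert_eq!(*arr.as_ptr().offset(3), 3);
        // dealloc_array takes the typed NonNull<T> and the element count.
        Global.dealloc_array(arr, n).unwrap();
    }
}
```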