diff --git a/src/dlmalloc b/src/dlmalloc
index 9b2dcac06c3e2..aa73165a5d5cb 160000
--- a/src/dlmalloc
+++ b/src/dlmalloc
@@ -1 +1 @@
-Subproject commit 9b2dcac06c3e23235f8997b3c5f2325a6d3382df
+Subproject commit aa73165a5d5cbe27ef255403d7929f91d6a17cdc
diff --git a/src/doc/unstable-book/src/language-features/global-allocator.md b/src/doc/unstable-book/src/language-features/global-allocator.md
index b3e6925b666b7..0cd784bbbf628 100644
--- a/src/doc/unstable-book/src/language-features/global-allocator.md
+++ b/src/doc/unstable-book/src/language-features/global-allocator.md
@@ -30,15 +30,16 @@ looks like:
 #![feature(global_allocator, allocator_api, heap_api)]
 
 use std::heap::{Alloc, System, Layout, AllocErr};
+use std::ptr::NonNull;
 
 struct MyAllocator;
 
 unsafe impl<'a> Alloc for &'a MyAllocator {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         System.dealloc(ptr, layout)
     }
}
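For callers, the visible effect of the signature change documented above is that a successful allocation now yields `NonNull<u8>` instead of a raw `*mut u8`. A minimal before/after sketch of a call site (nightly-only, against the in-tree `std::heap` API as of this change; the 64-byte layout is an arbitrary example):

```rust
#![feature(allocator_api, heap_api)]

use std::heap::{Alloc, Heap, Layout};
use std::ptr::{self, NonNull};

unsafe fn demo() {
    let layout = Layout::from_size_align(64, 8).unwrap();

    // Old API (before this diff): the Ok value was `*mut u8`, and nothing
    // stopped callers from forgetting the null check:
    //     let ptr: *mut u8 = Heap.alloc(layout.clone()).unwrap();

    // New API: success is `NonNull<u8>`, so non-nullness is carried in the
    // type; drop to a raw pointer only at the edges, via `as_ptr()`.
    let ptr: NonNull<u8> = Heap.alloc(layout.clone())
        .unwrap_or_else(|e| Heap.oom(e));
    ptr::write(ptr.as_ptr(), 0u8);
    Heap.dealloc(ptr, layout);
}

fn main() {
    unsafe { demo() }
}
```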
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index ccf2e2768d1a9..71483410dbc96 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -513,15 +513,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
         }
     }
 
@@ -559,7 +557,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|e| Heap.oom(e));
 
         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr()) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -626,7 +624,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -656,7 +654,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
             let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -1148,8 +1146,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
@@ -1161,7 +1157,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs
index 49109d522e965..06f61fdd60a2f 100644
--- a/src/liballoc/btree/node.rs
+++ b/src/liballoc/btree/node.rs
@@ -237,7 +237,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);
 
-        let top = self.node.ptr.as_ptr() as *mut u8;
+        let top = self.node.ptr;
 
         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -250,7 +250,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Heap.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -434,9 +434,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
             marker::Edge
         >
     > {
-        let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Heap.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -455,9 +455,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
             marker::Edge
         >
     > {
-        let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Heap.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1240,12 +1240,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }
 
             Heap.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.cast(),
                 Layout::new::<InternalNode<K, V>>(),
             );
         } else {
             Heap.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.cast(),
                 Layout::new::<LeafNode<K, V>>(),
            );
        }
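A recurring pattern in the hunks above is replacing chains of `as` casts with `NonNull::cast`, which changes the pointee type while preserving the non-null invariant. A small self-contained sketch (on current Rust `NonNull::cast` is stable; at the time of this diff it sat behind the `nonnull_cast` feature gate enabled later in this patch):

```rust
use std::ptr::NonNull;

fn main() {
    let mut value: u64 = 0x1122_3344_5566_7788;
    let typed: NonNull<u64> = NonNull::from(&mut value);

    // Before: `typed.as_ptr() as *mut u8`, which in longer chains like
    // `as *const LeafNode<K, V> as *const u8 as *mut u8` is easy to get
    // subtly wrong. After: one `cast`, still statically non-null.
    let untyped: NonNull<u8> = typed.cast();

    // Read the first byte through the untyped view (the printed value
    // depends on endianness; the cast itself does not).
    let first = unsafe { *untyped.as_ptr() };
    println!("first byte: {:#x}", first);
}
```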
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 9296a1130718e..287d20048ee32 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -17,10 +17,15 @@
 use core::intrinsics::{min_align_of_val, size_of_val};
 use core::mem::{self, ManuallyDrop};
+use core::ptr::NonNull;
 use core::usize;
 
-pub use core::heap::*;
+use core::heap;
+pub use core::heap::{AllocErr, CannotReallocInPlace, CollectionAllocErr, Layout};
+#[cfg(not(stage0))]
+pub use core::heap::{Alloc, Excess};
 
 #[doc(hidden)]
+#[cfg(stage0)]
 pub mod __core {
     pub use core::*;
 }
@@ -77,18 +82,14 @@ extern "Rust" {
 #[derive(Copy, Clone, Default, Debug)]
 pub struct Heap;
 
-unsafe impl Alloc for Heap {
+unsafe impl heap::Alloc for Heap {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
         let ptr = __rust_alloc(layout.size(),
                                layout.align(),
                                &mut *err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
-        } else {
-            Ok(ptr)
-        }
+        NonNull::new(ptr).ok_or_else(|| ManuallyDrop::into_inner(err))
     }
 
     #[inline]
@@ -100,8 +101,8 @@ unsafe impl Alloc for Heap {
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        __rust_dealloc(ptr, layout.size(), layout.align())
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+        __rust_dealloc(ptr.as_ptr(), layout.size(), layout.align())
     }
 
     #[inline]
@@ -118,85 +119,70 @@ unsafe impl Alloc for Heap {
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<u8>,
                       layout: Layout,
                       new_layout: Layout)
-        -> Result<*mut u8, AllocErr>
+        -> Result<NonNull<u8>, AllocErr>
     {
         let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
-        let ptr = __rust_realloc(ptr,
+        let ptr = __rust_realloc(ptr.as_ptr(),
                                  layout.size(),
                                  layout.align(),
                                  new_layout.size(),
                                  new_layout.align(),
                                  &mut *err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
-        } else {
-            mem::forget(err);
-            Ok(ptr)
-        }
+        NonNull::new(ptr).ok_or_else(|| ManuallyDrop::into_inner(err))
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
         let ptr = __rust_alloc_zeroed(layout.size(),
                                       layout.align(),
                                       &mut *err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
-        } else {
-            Ok(ptr)
-        }
+        NonNull::new(ptr).ok_or_else(|| ManuallyDrop::into_inner(err))
     }
 
     #[inline]
-    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
+    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<heap::Excess, AllocErr> {
         let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
         let mut size = 0;
         let ptr = __rust_alloc_excess(layout.size(),
                                       layout.align(),
                                       &mut size,
                                       &mut *err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
-        } else {
-            Ok(Excess(ptr, size))
-        }
+        NonNull::new(ptr).map(|p| heap::Excess(p, size))
+            .ok_or_else(|| ManuallyDrop::into_inner(err))
     }
 
     #[inline]
     unsafe fn realloc_excess(&mut self,
-                             ptr: *mut u8,
+                             ptr: NonNull<u8>,
                              layout: Layout,
-                             new_layout: Layout) -> Result<Excess, AllocErr> {
+                             new_layout: Layout) -> Result<heap::Excess, AllocErr> {
         let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
         let mut size = 0;
-        let ptr = __rust_realloc_excess(ptr,
+        let ptr = __rust_realloc_excess(ptr.as_ptr(),
                                         layout.size(),
                                         layout.align(),
                                         new_layout.size(),
                                         new_layout.align(),
                                         &mut size,
                                         &mut *err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
-        } else {
-            Ok(Excess(ptr, size))
-        }
+        NonNull::new(ptr).map(|p| heap::Excess(p, size))
+            .ok_or_else(|| ManuallyDrop::into_inner(err))
     }
 
     #[inline]
     unsafe fn grow_in_place(&mut self,
-                            ptr: *mut u8,
+                            ptr: NonNull<u8>,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
         debug_assert!(new_layout.size() >= layout.size());
         debug_assert!(new_layout.align() == layout.align());
-        let ret = __rust_grow_in_place(ptr,
+        let ret = __rust_grow_in_place(ptr.as_ptr(),
                                        layout.size(),
                                        layout.align(),
                                        new_layout.size(),
@@ -210,12 +196,12 @@ unsafe impl Alloc for Heap {
 
     #[inline]
     unsafe fn shrink_in_place(&mut self,
-                              ptr: *mut u8,
+                              ptr: NonNull<u8>,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
         debug_assert!(new_layout.size() <= layout.size());
         debug_assert!(new_layout.align() == layout.align());
-        let ret = __rust_shrink_in_place(ptr,
+        let ret = __rust_shrink_in_place(ptr.as_ptr(),
                                          layout.size(),
                                          layout.align(),
                                          new_layout.size(),
@@ -228,6 +214,76 @@ unsafe impl Alloc for Heap {
     }
 }
 
+// When building stage0 with an older rustc, #[global_allocator] will
+// expand to code using alloc::heap::Alloc and expecting the old API.
+// A newer rustc's #[global_allocator] expansion uses core::heap::Alloc
+// and the new API. For stage0, we thus expose the old API from this
+// module.
+#[cfg(stage0)]
+#[derive(Debug)]
+pub struct Excess(pub *mut u8, pub usize);
+
+#[cfg(stage0)]
+pub unsafe trait Alloc: heap::Alloc {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        heap::Alloc::alloc(self, layout).map(|p| p.as_ptr())
+    }
+
+    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        heap::Alloc::dealloc(self, NonNull::new_unchecked(ptr), layout)
+    }
+
+    fn oom(&mut self, e: AllocErr) -> ! {
+        heap::Alloc::oom(self, e)
+    }
+
+    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
+        heap::Alloc::usable_size(self, layout)
+    }
+
+    unsafe fn realloc(&mut self,
+                      ptr: *mut u8,
+                      layout: Layout,
+                      new_layout: Layout) -> Result<*mut u8, AllocErr> {
+        heap::Alloc::realloc(self, NonNull::new_unchecked(ptr), layout, new_layout)
+            .map(|p| p.as_ptr())
+    }
+
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        heap::Alloc::alloc_zeroed(self, layout).map(|p| p.as_ptr())
+    }
+
+    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
+        heap::Alloc::alloc_excess(self, layout)
+            .map(|heap::Excess(p, s)| Excess(p.as_ptr(), s))
+    }
+
+    unsafe fn realloc_excess(&mut self,
+                             ptr: *mut u8,
+                             layout: Layout,
+                             new_layout: Layout) -> Result<Excess, AllocErr> {
+        heap::Alloc::realloc_excess(self, NonNull::new_unchecked(ptr), layout, new_layout)
+            .map(|heap::Excess(p, s)| Excess(p.as_ptr(), s))
+    }
+
+    unsafe fn grow_in_place(&mut self,
+                            ptr: *mut u8,
+                            layout: Layout,
+                            new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        heap::Alloc::grow_in_place(self, NonNull::new_unchecked(ptr), layout, new_layout)
+    }
+
+    unsafe fn shrink_in_place(&mut self,
+                              ptr: *mut u8,
+                              layout: Layout,
+                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        heap::Alloc::shrink_in_place(self, NonNull::new_unchecked(ptr), layout, new_layout)
+    }
+}
+
+#[cfg(stage0)]
+unsafe impl<T: heap::Alloc> Alloc for T {}
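The stage0 shim above is an instance of a general compatibility pattern: keep the old trait as a thin facade whose default methods forward to the new trait and adapt the types, plus a blanket impl so every implementor of the new API automatically satisfies the old one. A minimal standalone sketch of that pattern (toy trait names, not from this PR):

```rust
use std::ptr::NonNull;

// "New" API: nullability is encoded in the type.
trait NewAlloc {
    fn obtain(&mut self) -> Option<NonNull<u8>>;
}

// "Old" API, kept only as a facade: every method has a default body
// that forwards to the new trait and converts at the boundary.
trait OldAlloc: NewAlloc {
    fn obtain_raw(&mut self) -> *mut u8 {
        self.obtain().map_or(std::ptr::null_mut(), |p| p.as_ptr())
    }
}

// Blanket impl: implementing the new trait is enough to get the old one.
impl<T: NewAlloc> OldAlloc for T {}

struct Bump(Box<[u8]>);

impl NewAlloc for Bump {
    fn obtain(&mut self) -> Option<NonNull<u8>> {
        NonNull::new(self.0.as_mut_ptr())
    }
}

fn main() {
    let mut a = Bump(vec![0u8; 16].into_boxed_slice());
    // Old-style callers keep compiling, routed through the new API.
    assert!(!a.obtain_raw().is_null());
}
```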
 
 /// The allocator for unique pointers.
 // This function must not unwind. If it does, MIR trans will fail.
 #[cfg(not(test))]
@@ -238,9 +294,9 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         align as *mut u8
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Heap.alloc(layout).unwrap_or_else(|err| {
-            Heap.oom(err)
-        })
+        heap::Alloc::alloc(&mut Heap, layout).unwrap_or_else(|err| {
+            heap::Alloc::oom(&mut Heap, err)
+        }).as_ptr()
     }
 }
 
@@ -252,7 +308,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Heap.dealloc(ptr as *mut u8, layout);
+        heap::Alloc::dealloc(&mut Heap, NonNull::new_unchecked(ptr).cast(), layout);
     }
 }
 
@@ -261,7 +317,8 @@ mod tests {
     extern crate test;
     use self::test::Bencher;
     use boxed::Box;
-    use heap::{Heap, Alloc, Layout};
+    use heap::Heap;
+    use core::heap::{Alloc, Layout};
 
     #[test]
     fn allocate_zeroed() {
@@ -270,8 +327,8 @@ mod tests {
         let ptr = Heap.alloc_zeroed(layout.clone())
             .unwrap_or_else(|e| Heap.oom(e));
 
-        let end = ptr.offset(layout.size() as isize);
-        let mut i = ptr;
+        let end = ptr.as_ptr().offset(layout.size() as isize);
+        let mut i = ptr.as_ptr();
         while i < end {
             assert_eq!(*i, 0);
             i = i.offset(1);
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 2fad3b0bad4ac..73d76c7b5217f 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -102,6 +102,7 @@
 #![feature(iter_rfold)]
 #![feature(lang_items)]
 #![feature(needs_allocator)]
+#![feature(nonnull_cast)]
 #![feature(nonzero)]
 #![feature(offset_to)]
 #![feature(optin_builtin_traits)]
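`exchange_malloc` above keeps the long-standing trick for zero-sized allocations: no memory is requested, and the alignment value itself serves as a well-aligned, non-null dangling pointer. `NonNull::dangling()`, used for the same purpose in `raw_vec.rs` below, packages exactly that trick. A standalone illustration in plain Rust (not the PR's internal API):

```rust
use std::mem;
use std::ptr::NonNull;

fn main() {
    // For a zero-sized "allocation", any non-null, well-aligned address
    // works, because it will never be dereferenced for actual storage.
    let by_hand = mem::align_of::<u64>() as *mut u64;

    // `NonNull::dangling()` encodes the same address, plus the non-null
    // guarantee, in the type system.
    let packaged: NonNull<u64> = NonNull::dangling();

    assert_eq!(by_hand as usize, packaged.as_ptr() as usize);
    println!("dangling u64 pointer: {:p}", packaged.as_ptr());
}
```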
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 3edce8aebdf39..842481a719924 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -12,7 +12,7 @@ use core::cmp;
 use core::heap::{Alloc, Layout};
 use core::mem;
 use core::ops::Drop;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use heap::Heap;
 use super::boxed::Box;
@@ -91,7 +91,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
         // handles ZSTs and `cap = 0` alike
         let ptr = if alloc_size == 0 {
-            mem::align_of::<T>() as *mut u8
+            NonNull::<T>::dangling().cast()
         } else {
             let align = mem::align_of::<T>();
             let result = if zeroed {
@@ -106,7 +106,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         };
 
         RawVec {
-            ptr: Unique::new_unchecked(ptr as *mut _),
+            ptr: ptr.cast().into(),
             cap,
             a,
         }
@@ -312,11 +312,11 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = new_cap * elem_size;
                 let new_layout = Layout::from_size_align_unchecked(new_size, cur.align());
                 alloc_guard(new_size).expect("capacity overflow");
-                let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
                                              cur,
                                              new_layout);
                 match ptr_res {
-                    Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)),
+                    Ok(ptr) => (new_cap, ptr.cast().into()),
                     Err(e) => self.a.oom(e),
                 }
             }
@@ -371,9 +371,8 @@ impl<T, A: Alloc> RawVec<T, A> {
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
             alloc_guard(new_size).expect("capacity overflow");
-            let ptr = self.ptr() as *mut _;
             let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
-            match self.a.grow_in_place(ptr, old_layout, new_layout) {
+            match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_layout) {
                 Ok(_) => {
                     // We can't directly divide `size`.
                     self.cap = new_cap;
@@ -429,13 +428,12 @@ impl<T, A: Alloc> RawVec<T, A> {
 
         let res = match self.current_layout() {
             Some(layout) => {
-                let old_ptr = self.ptr.as_ptr() as *mut u8;
-                self.a.realloc(old_ptr, layout, new_layout)
+                self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout)
             }
             None => self.a.alloc(new_layout),
         };
 
-        self.ptr = Unique::new_unchecked(res? as *mut T);
+        self.ptr = res?.cast().into();
         self.cap = new_cap;
 
         Ok(())
@@ -538,13 +536,12 @@ impl<T, A: Alloc> RawVec<T, A> {
 
         let res = match self.current_layout() {
             Some(layout) => {
-                let old_ptr = self.ptr.as_ptr() as *mut u8;
-                self.a.realloc(old_ptr, layout, new_layout)
+                self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout)
             }
             None => self.a.alloc(new_layout),
         };
 
-        self.ptr = Unique::new_unchecked(res? as *mut T);
+        self.ptr = res?.cast().into();
         self.cap = new_cap;
 
         Ok(())
@@ -601,11 +598,10 @@ impl<T, A: Alloc> RawVec<T, A> {
             // (regardless of whether `self.cap - used_cap` wrapped).
             // Therefore we can safely call grow_in_place.
 
-            let ptr = self.ptr() as *mut _;
             let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).expect("capacity overflow");
-            match self.a.grow_in_place(ptr, old_layout, new_layout) {
+            match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_layout) {
                 Ok(_) => {
                     self.cap = new_cap;
                     true
@@ -666,10 +662,10 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
                 let new_layout = Layout::from_size_align_unchecked(new_size, align);
-                match self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                match self.a.realloc(NonNull::from(self.ptr).cast(),
                                      old_layout,
                                      new_layout) {
-                    Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T),
+                    Ok(p) => self.ptr = p.cast().into(),
                     Err(err) => self.a.oom(err),
                 }
             }
@@ -702,8 +698,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                let ptr = self.ptr() as *mut u8;
-                self.a.dealloc(ptr, layout);
+                self.a.dealloc(NonNull::from(self.ptr).cast(), layout);
             }
         }
     }
@@ -758,7 +753,7 @@ mod tests {
     // before allocation attempts start failing.
     struct BoundedAlloc { fuel: usize }
     unsafe impl Alloc for BoundedAlloc {
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
             let size = layout.size();
             if size > self.fuel {
                 return Err(AllocErr::Unsupported { details: "fuel exhausted" });
@@ -768,7 +763,7 @@ mod tests {
                 err @ Err(_) => err,
             }
         }
-        unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
             Heap.dealloc(ptr, layout)
         }
     }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 8bdc57f96a6d5..3bba8d673b513 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -672,7 +672,7 @@ impl<T: ?Sized> Rc<T> {
             .unwrap_or_else(|e| Heap.oom(e));
 
         // Initialize the real RcBox
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr()) as *mut RcBox<T>;
 
         ptr::write(&mut (*inner).strong, Cell::new(1));
         ptr::write(&mut (*inner).weak, Cell::new(1));
@@ -738,7 +738,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new RcBox will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -768,7 +768,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
             let elems = &mut (*ptr).value as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -835,8 +835,6 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_ptr();
-
             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
@@ -847,7 +845,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                    Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
                 }
             }
         }
@@ -1267,13 +1265,11 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_ptr();
-
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
             }
         }
     }
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index d3ce12056bb49..7caee41f9b055 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -34,7 +34,8 @@ fn check_overalign_requests<T: Alloc>(mut allocator: T) {
         allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
     }).collect();
     for &ptr in &pointers {
-        assert_eq!((ptr as usize) % align, 0, "Got a pointer less aligned than requested")
+        assert_eq!((ptr.as_ptr() as usize) % align, 0,
+                   "Got a pointer less aligned than requested")
     }
     // Clean up
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 6c1e9cb0b9cfd..ae2b4cfda670d 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -43,6 +43,7 @@ const MIN_ALIGN: usize = 8;
 const MIN_ALIGN: usize = 16;
 
 use core::heap::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
+use core::ptr::NonNull;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub struct System;
@@ -50,27 +51,27 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl Alloc for System {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         (&*self).alloc(layout)
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&mut self, layout: Layout)
-        -> Result<*mut u8, AllocErr>
+        -> Result<NonNull<u8>, AllocErr>
     {
         (&*self).alloc_zeroed(layout)
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         (&*self).dealloc(ptr, layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<u8>,
                       old_layout: Layout,
-                      new_layout: Layout) -> Result<*mut u8, AllocErr> {
+                      new_layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         (&*self).realloc(ptr, old_layout, new_layout)
     }
 
@@ -90,7 +91,7 @@ unsafe impl Alloc for System {
 
     #[inline]
     unsafe fn realloc_excess(&mut self,
-                             ptr: *mut u8,
+                             ptr: NonNull<u8>,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
         (&*self).realloc_excess(ptr, layout, new_layout)
@@ -98,7 +99,7 @@ unsafe impl Alloc for System {
 
     #[inline]
     unsafe fn grow_in_place(&mut self,
-                            ptr: *mut u8,
+                            ptr: NonNull<u8>,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
         (&*self).grow_in_place(ptr, layout, new_layout)
@@ -106,7 +107,7 @@ unsafe impl Alloc for System {
 
     #[inline]
     unsafe fn shrink_in_place(&mut self,
-                              ptr: *mut u8,
+                              ptr: NonNull<u8>,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
         (&*self).shrink_in_place(ptr, layout, new_layout)
@@ -118,7 +119,7 @@ mod platform {
     extern crate libc;
 
     use core::cmp;
-    use core::ptr;
+    use core::ptr::{self, NonNull};
     use MIN_ALIGN;
     use System;
@@ -127,49 +128,41 @@ mod platform {
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl<'a> Alloc for &'a System {
         #[inline]
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
             let ptr = if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
                 libc::malloc(layout.size()) as *mut u8
             } else {
                 aligned_malloc(&layout)
             };
-            if !ptr.is_null() {
-                Ok(ptr)
-            } else {
-                Err(AllocErr::Exhausted { request: layout })
-            }
+            NonNull::new(ptr).ok_or_else(|| AllocErr::Exhausted { request: layout })
         }
 
         #[inline]
         unsafe fn alloc_zeroed(&mut self, layout: Layout)
-            -> Result<*mut u8, AllocErr>
+            -> Result<NonNull<u8>, AllocErr>
         {
             if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
                 let ptr = libc::calloc(layout.size(), 1) as *mut u8;
-                if !ptr.is_null() {
-                    Ok(ptr)
-                } else {
-                    Err(AllocErr::Exhausted { request: layout })
-                }
+                NonNull::new(ptr).ok_or_else(|| AllocErr::Exhausted { request: layout })
             } else {
                 let ret = self.alloc(layout.clone());
                 if let Ok(ptr) = ret {
-                    ptr::write_bytes(ptr, 0, layout.size());
+                    ptr::write_bytes(ptr.as_ptr(), 0, layout.size());
                 }
                 ret
             }
         }
 
         #[inline]
-        unsafe fn dealloc(&mut self, ptr: *mut u8, _layout: Layout) {
-            libc::free(ptr as *mut libc::c_void)
+        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, _layout: Layout) {
+            libc::free(ptr.as_ptr() as *mut libc::c_void)
         }
 
         #[inline]
         unsafe fn realloc(&mut self,
-                          ptr: *mut u8,
+                          ptr: NonNull<u8>,
                           old_layout: Layout,
-                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
+                          new_layout: Layout) -> Result<NonNull<u8>, AllocErr> {
             if old_layout.align() != new_layout.align() {
                 return Err(AllocErr::Unsupported {
                     details: "cannot change alignment on `realloc`",
@@ -177,17 +170,14 @@ mod platform {
             }
 
             if new_layout.align() <= MIN_ALIGN && new_layout.align() <= new_layout.size(){
-                let ptr = libc::realloc(ptr as *mut libc::c_void, new_layout.size());
-                if !ptr.is_null() {
-                    Ok(ptr as *mut u8)
-                } else {
-                    Err(AllocErr::Exhausted { request: new_layout })
-                }
+                let ptr = libc::realloc(ptr.as_ptr() as *mut libc::c_void, new_layout.size());
+                NonNull::new(ptr as *mut u8)
+                    .ok_or_else(|| AllocErr::Exhausted { request: new_layout })
             } else {
                 let res = self.alloc(new_layout.clone());
                 if let Ok(new_ptr) = res {
                     let size = cmp::min(old_layout.size(), new_layout.size());
-                    ptr::copy_nonoverlapping(ptr, new_ptr, size);
+                    ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), size);
                     self.dealloc(ptr, old_layout);
                 }
                 res
@@ -272,7 +262,7 @@ mod platform {
 #[allow(bad_style)]
 mod platform {
     use core::cmp;
-    use core::ptr;
+    use core::ptr::{self, NonNull};
     use MIN_ALIGN;
     use System;
@@ -321,7 +311,7 @@ mod platform {
 
     #[inline]
     unsafe fn allocate_with_flags(layout: Layout, flags: DWORD)
-        -> Result<*mut u8, AllocErr>
+        -> Result<NonNull<u8>, AllocErr>
     {
         let ptr = if layout.align() <= MIN_ALIGN {
             HeapAlloc(GetProcessHeap(), flags, layout.size())
@@ -334,35 +324,31 @@ mod platform {
                 align_ptr(ptr, layout.align())
             }
         };
-        if ptr.is_null() {
-            Err(AllocErr::Exhausted { request: layout })
-        } else {
-            Ok(ptr as *mut u8)
-        }
+        NonNull::new(ptr).ok_or_else(|| AllocErr::Exhausted { request: layout })
     }
 
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl<'a> Alloc for &'a System {
         #[inline]
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
             allocate_with_flags(layout, 0)
         }
 
         #[inline]
         unsafe fn alloc_zeroed(&mut self, layout: Layout)
-            -> Result<*mut u8, AllocErr>
+            -> Result<NonNull<u8>, AllocErr>
         {
             allocate_with_flags(layout, HEAP_ZERO_MEMORY)
         }
 
         #[inline]
-        unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
             if layout.align() <= MIN_ALIGN {
-                let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID);
+                let err = HeapFree(GetProcessHeap(), 0, ptr.as_ptr() as LPVOID);
                 debug_assert!(err != 0, "Failed to free heap memory: {}",
                               GetLastError());
             } else {
-                let header = get_header(ptr);
+                let header = get_header(ptr.as_ptr());
                 let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID);
                 debug_assert!(err != 0, "Failed to free heap memory: {}",
                               GetLastError());
@@ -371,9 +357,9 @@ mod platform {
 
         #[inline]
         unsafe fn realloc(&mut self,
-                          ptr: *mut u8,
+                          ptr: NonNull<u8>,
                           old_layout: Layout,
-                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
+                          new_layout: Layout) -> Result<NonNull<u8>, AllocErr> {
             if old_layout.align() != new_layout.align() {
                 return Err(AllocErr::Unsupported {
                     details: "cannot change alignment on `realloc`",
@@ -383,18 +369,14 @@ mod platform {
 
             if new_layout.align() <= MIN_ALIGN {
                 let ptr = HeapReAlloc(GetProcessHeap(),
                                       0,
-                                      ptr as LPVOID,
+                                      ptr.as_ptr() as LPVOID,
                                       new_layout.size());
-                if !ptr.is_null() {
-                    Ok(ptr as *mut u8)
-                } else {
-                    Err(AllocErr::Exhausted { request: new_layout })
-                }
+                NonNull::new(ptr).ok_or_else(|| AllocErr::Exhausted { request: new_layout })
             } else {
                 let res = self.alloc(new_layout.clone());
                 if let Ok(new_ptr) = res {
                     let size = cmp::min(old_layout.size(), new_layout.size());
-                    ptr::copy_nonoverlapping(ptr, new_ptr, size);
+                    ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), size);
                     self.dealloc(ptr, old_layout);
                 }
                 res
@@ -403,7 +385,7 @@ mod platform {
 
         #[inline]
         unsafe fn grow_in_place(&mut self,
-                                ptr: *mut u8,
+                                ptr: NonNull<u8>,
                                 layout: Layout,
                                 new_layout: Layout) -> Result<(), CannotReallocInPlace> {
             self.shrink_in_place(ptr, layout, new_layout)
@@ -411,7 +393,7 @@ mod platform {
 
         #[inline]
         unsafe fn shrink_in_place(&mut self,
-                                  ptr: *mut u8,
+                                  ptr: NonNull<u8>,
                                   old_layout: Layout,
                                   new_layout: Layout) -> Result<(), CannotReallocInPlace> {
             if old_layout.align() != new_layout.align() {
@@ -421,10 +403,10 @@ mod platform {
 
             let new = if new_layout.align() <= MIN_ALIGN {
                 HeapReAlloc(GetProcessHeap(),
                             HEAP_REALLOC_IN_PLACE_ONLY,
-                            ptr as LPVOID,
+                            ptr.as_ptr() as LPVOID,
                             new_layout.size())
             } else {
-                let header = get_header(ptr);
+                let header = get_header(ptr.as_ptr());
                 HeapReAlloc(GetProcessHeap(),
                             HEAP_REALLOC_IN_PLACE_ONLY,
                             header.0 as LPVOID,
@@ -489,33 +471,34 @@ mod platform {
     extern crate dlmalloc;
 
     use core::heap::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
+    use core::ptr::NonNull;
     use System;
     use self::dlmalloc::GlobalDlmalloc;
 
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl<'a> Alloc for &'a System {
         #[inline]
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
             GlobalDlmalloc.alloc(layout)
         }
 
         #[inline]
         unsafe fn alloc_zeroed(&mut self, layout: Layout)
-            -> Result<*mut u8, AllocErr>
+            -> Result<NonNull<u8>, AllocErr>
         {
             GlobalDlmalloc.alloc_zeroed(layout)
         }
 
         #[inline]
-        unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
             GlobalDlmalloc.dealloc(ptr, layout)
         }
 
         #[inline]
         unsafe fn realloc(&mut self,
-                          ptr: *mut u8,
+                          ptr: NonNull<u8>,
                           old_layout: Layout,
-                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
+                          new_layout: Layout) -> Result<NonNull<u8>, AllocErr> {
             GlobalDlmalloc.realloc(ptr, old_layout, new_layout)
         }
 
@@ -531,7 +514,7 @@ mod platform {
 
         #[inline]
         unsafe fn realloc_excess(&mut self,
-                                 ptr: *mut u8,
+                                 ptr: NonNull<u8>,
                                  layout: Layout,
                                  new_layout: Layout) -> Result<Excess, AllocErr> {
             GlobalDlmalloc.realloc_excess(ptr, layout, new_layout)
@@ -539,7 +522,7 @@ mod platform {
 
         #[inline]
         unsafe fn grow_in_place(&mut self,
-                                ptr: *mut u8,
+                                ptr: NonNull<u8>,
                                 layout: Layout,
                                 new_layout: Layout) -> Result<(), CannotReallocInPlace> {
             GlobalDlmalloc.grow_in_place(ptr, layout, new_layout)
@@ -547,7 +530,7 @@ mod platform {
 
         #[inline]
         unsafe fn shrink_in_place(&mut self,
-                                  ptr: *mut u8,
+                                  ptr: NonNull<u8>,
                                   layout: Layout,
                                   new_layout: Layout) -> Result<(), CannotReallocInPlace> {
             GlobalDlmalloc.shrink_in_place(ptr, layout, new_layout)
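Both the platform `realloc` implementations above and the trait's default `realloc` (next file) share one fallback shape when the allocator cannot resize in place: allocate the new block, copy the smaller of the two sizes, free the old block. Distilled into one generic helper over this PR's `Alloc` trait (a sketch only; the real default impl additionally checks `usable_size` and alignment before falling back):

```rust
#![feature(allocator_api)]

use std::cmp;
use std::heap::{Alloc, AllocErr, Layout};
use std::ptr::{self, NonNull};

/// Grow-by-move fallback: valid only if `ptr` was allocated by `a`
/// with `old_layout` (assumed, as in the trait's safety contract).
unsafe fn realloc_by_move<A: Alloc>(a: &mut A,
                                    ptr: NonNull<u8>,
                                    old_layout: Layout,
                                    new_layout: Layout)
                                    -> Result<NonNull<u8>, AllocErr> {
    let new_ptr = a.alloc(new_layout.clone())?;
    // The two blocks are distinct, so a non-overlapping copy is valid;
    // copy only the overlap of the old and new sizes.
    ptr::copy_nonoverlapping(ptr.as_ptr(),
                             new_ptr.as_ptr(),
                             cmp::min(old_layout.size(), new_layout.size()));
    a.dealloc(ptr, old_layout);
    Ok(new_ptr)
}
```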
diff --git a/src/libcore/heap.rs b/src/libcore/heap.rs
index dae60b1647ff5..741bc922cfc36 100644
--- a/src/libcore/heap.rs
+++ b/src/libcore/heap.rs
@@ -24,7 +24,7 @@ use ptr::{self, NonNull};
 /// Represents the combination of a starting address and
 /// a total capacity of the returned block.
 #[derive(Debug)]
-pub struct Excess(pub *mut u8, pub usize);
+pub struct Excess(pub NonNull<u8>, pub usize);
 
 fn size_align<T>() -> (usize, usize) {
     (mem::size_of::<T>(), mem::align_of::<T>())
@@ -522,7 +522,7 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr>;
 
     /// Deallocate the memory referenced by `ptr`.
     ///
@@ -539,7 +539,7 @@ pub unsafe trait Alloc {
     /// * In addition to fitting the block of memory `layout`, the
     ///   alignment of the `layout` must match the alignment used
     ///   to allocate that block of memory.
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout);
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
 
     /// Allocator-specific method for signaling an out-of-memory
     /// condition.
@@ -672,9 +672,9 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<u8>,
                       layout: Layout,
-                      new_layout: Layout) -> Result<*mut u8, AllocErr> {
+                      new_layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         let new_size = new_layout.size();
         let old_size = layout.size();
         let aligns_match = layout.align == new_layout.align;
@@ -692,7 +692,9 @@ pub unsafe trait Alloc {
         // otherwise, fall back on alloc + copy + dealloc.
         let result = self.alloc(new_layout);
         if let Ok(new_ptr) = result {
-            ptr::copy_nonoverlapping(ptr as *const u8, new_ptr, cmp::min(old_size, new_size));
+            ptr::copy_nonoverlapping(ptr.as_ptr() as *const u8,
+                                     new_ptr.as_ptr(),
+                                     cmp::min(old_size, new_size));
             self.dealloc(ptr, layout);
         }
         result
@@ -714,11 +716,11 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         let size = layout.size();
         let p = self.alloc(layout);
         if let Ok(p) = p {
-            ptr::write_bytes(p, 0, size);
+            ptr::write_bytes(p.as_ptr(), 0, size);
         }
         p
     }
@@ -763,7 +765,7 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc_excess(&mut self,
-                             ptr: *mut u8,
+                             ptr: NonNull<u8>,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
         let usable_size = self.usable_size(&new_layout);
@@ -808,7 +810,7 @@ pub unsafe trait Alloc {
     /// `grow_in_place` failures without aborting, or to fall back on
     /// another reallocation method before resorting to an abort.
     unsafe fn grow_in_place(&mut self,
-                            ptr: *mut u8,
+                            ptr: NonNull<u8>,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -866,7 +868,7 @@ pub unsafe trait Alloc {
     /// `shrink_in_place` failures without aborting, or to fall back
     /// on another reallocation method before resorting to an abort.
     unsafe fn shrink_in_place(&mut self,
-                              ptr: *mut u8,
+                              ptr: NonNull<u8>,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -918,7 +920,7 @@ pub unsafe trait Alloc {
     {
         let k = Layout::new::<T>();
         if k.size() > 0 {
-            unsafe { self.alloc(k).map(|p| NonNull::new_unchecked(p as *mut T)) }
+            unsafe { self.alloc(k).map(|p| p.cast()) }
         } else {
             Err(AllocErr::invalid_input("zero-sized type invalid for alloc_one"))
         }
@@ -944,10 +946,9 @@ pub unsafe trait Alloc {
     unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
         where Self: Sized
     {
-        let raw_ptr = ptr.as_ptr() as *mut u8;
         let k = Layout::new::<T>();
         if k.size() > 0 {
-            self.dealloc(raw_ptr, k);
+            self.dealloc(ptr.cast(), k);
         }
     }
 
@@ -987,10 +988,7 @@ pub unsafe trait Alloc {
         match Layout::array::<T>(n) {
             Some(ref layout) if layout.size() > 0 => {
                 unsafe {
-                    self.alloc(layout.clone())
-                        .map(|p| {
-                            NonNull::new_unchecked(p as *mut T)
-                        })
+                    self.alloc(layout.clone()).map(|p| p.cast())
                 }
             }
             _ => Err(AllocErr::invalid_input("invalid layout for alloc_array")),
@@ -1035,10 +1033,9 @@ pub unsafe trait Alloc {
                              n_new: usize) -> Result<NonNull<T>, AllocErr>
         where Self: Sized
     {
-        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new), ptr.as_ptr()) {
-            (Some(ref k_old), Some(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => {
-                self.realloc(ptr as *mut u8, k_old.clone(), k_new.clone())
-                    .map(|p| NonNull::new_unchecked(p as *mut T))
+        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
+            (Some(ref k_old), Some(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
+                self.realloc(ptr.cast(), k_old.clone(), k_new.clone()).map(|p| p.cast())
             }
             _ => {
                 Err(AllocErr::invalid_input("invalid layout for realloc_array"))
@@ -1069,10 +1066,9 @@ pub unsafe trait Alloc {
     unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>
         where Self: Sized
     {
-        let raw_ptr = ptr.as_ptr() as *mut u8;
         match Layout::array::<T>(n) {
             Some(ref k) if k.size() > 0 => {
-                Ok(self.dealloc(raw_ptr, k.clone()))
+                Ok(self.dealloc(ptr.cast(), k.clone()))
             }
             _ => {
                 Err(AllocErr::invalid_input("invalid layout for dealloc_array"))
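With these changes the typed convenience methods (`alloc_one`, `realloc_array`, ...) stay in `NonNull` end to end: the only remaining conversion is a `cast`, with no `new_unchecked` round trip through raw pointers. A usage sketch against this in-tree API (nightly-only at the time; error handling shortened):

```rust
#![feature(allocator_api)]

use std::heap::{Alloc, Heap};
use std::ptr::{self, NonNull};

fn main() {
    // Allocate storage for a single u64, typed from the start.
    let p: NonNull<u64> = Heap.alloc_one::<u64>()
        .unwrap_or_else(|e| Heap.oom(e));

    unsafe {
        ptr::write(p.as_ptr(), 42);
        assert_eq!(*p.as_ptr(), 42);
        // The layout is recomputed from T; no size/align bookkeeping here.
        Heap.dealloc_one(p);
    }
}
```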
diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs
index ee38cca7828be..8f0fdd47f8c0e 100644
--- a/src/librustc_allocator/expand.rs
+++ b/src/librustc_allocator/expand.rs
@@ -88,7 +88,7 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> {
             span,
             kind: AllocatorKind::Global,
             global: item.ident,
-            alloc: Ident::from_str("alloc"),
+            core: Ident::from_str("core"),
             cx: ExtCtxt::new(self.sess, ecfg, self.resolver),
         };
         let super_path = f.cx.path(f.span, vec![
@@ -96,7 +96,7 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> {
             f.global,
         ]);
         let mut items = vec![
-            f.cx.item_extern_crate(f.span, f.alloc),
+            f.cx.item_extern_crate(f.span, f.core),
             f.cx.item_use_simple(
                 f.span,
                 respan(f.span.shrink_to_lo(), VisibilityKind::Inherited),
@@ -126,7 +126,7 @@ struct AllocFnFactory<'a> {
     span: Span,
     kind: AllocatorKind,
     global: Ident,
-    alloc: Ident,
+    core: Ident,
     cx: ExtCtxt<'a>,
 }
 
@@ -159,7 +159,7 @@ impl<'a> AllocFnFactory<'a> {
     fn call_allocator(&self, method: &str, mut args: Vec<P<Expr>>) -> P<Expr> {
         let method = self.cx.path(self.span, vec![
-            self.alloc,
+            self.core,
             Ident::from_str("heap"),
             Ident::from_str("Alloc"),
             Ident::from_str(method),
@@ -205,7 +205,7 @@ impl<'a> AllocFnFactory<'a> {
             args.push(self.cx.arg(self.span, align, ty_usize));
 
             let layout_new = self.cx.path(self.span, vec![
-                self.alloc,
+                self.core,
                 Ident::from_str("heap"),
                 Ident::from_str("Layout"),
                 Ident::from_str("from_size_align_unchecked"),
@@ -250,9 +250,17 @@ impl<'a> AllocFnFactory<'a> {
             }
 
             AllocatorTy::Ptr => {
+                let nonnull_new = self.cx.path(self.span, vec![
+                    self.core,
+                    Ident::from_str("ptr"),
+                    Ident::from_str("NonNull"),
+                    Ident::from_str("new_unchecked"),
+                ]);
+                let nonnull_new = self.cx.expr_path(nonnull_new);
                 let ident = ident();
                 args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
-                self.cx.expr_ident(self.span, ident)
+                let expr = self.cx.expr_ident(self.span, ident);
+                self.cx.expr_call(self.span, nonnull_new, vec![expr])
             }
 
             AllocatorTy::ResultPtr |
@@ -314,7 +322,7 @@ impl<'a> AllocFnFactory<'a> {
         // match #expr {
         //     Ok(ptr) => {
         //         *excess = ptr.1;
-        //         ptr.0
+        //         ptr.0.as_ptr()
         //     }
         //     Err(e) => {
         //         ptr::write(err_ptr, e);
@@ -344,6 +352,12 @@ impl<'a> AllocFnFactory<'a> {
 
             let ret = self.cx.expr_tup_field_access(self.span,
                                                     ptr.clone(),
                                                     0);
+            let ret = self.cx.expr_method_call(
+                self.span,
+                ret,
+                Ident::from_str("as_ptr"),
+                Vec::new()
+            );
             let ret = self.cx.stmt_expr(ret);
             let block = self.cx.block(self.span, vec![write, ret]);
             self.cx.expr_block(block)
@@ -357,9 +371,7 @@ impl<'a> AllocFnFactory<'a> {
         let err_expr = {
             let err = self.cx.expr_ident(self.span, name);
             let write = self.cx.path(self.span, vec![
-                self.alloc,
-                Ident::from_str("heap"),
-                Ident::from_str("__core"),
+                self.core,
                 Ident::from_str("ptr"),
                 Ident::from_str("write"),
             ]);
@@ -386,7 +398,7 @@ impl<'a> AllocFnFactory<'a> {
         // We're creating:
         //
         // match #expr {
-        //     Ok(ptr) => ptr,
+        //     Ok(ptr) => ptr.as_ptr(),
         //     Err(e) => {
         //         ptr::write(err_ptr, e);
         //         0 as *mut u8
@@ -402,6 +414,12 @@ impl<'a> AllocFnFactory<'a> {
 
         let name = ident();
         let ok_expr = self.cx.expr_ident(self.span, name);
+        let ok_expr = self.cx.expr_method_call(
+            self.span,
+            ok_expr,
+            Ident::from_str("as_ptr"),
+            Vec::new()
+        );
         let pat = self.cx.pat_ident(self.span, name);
         let ok = self.cx.path_ident(self.span, Ident::from_str("Ok"));
         let ok = self.cx.pat_tuple_struct(self.span, ok, vec![pat]);
@@ -411,9 +429,7 @@ impl<'a> AllocFnFactory<'a> {
         let err_expr = {
             let err = self.cx.expr_ident(self.span, name);
             let write = self.cx.path(self.span, vec![
-                self.alloc,
-                Ident::from_str("heap"),
-                Ident::from_str("__core"),
+                self.core,
                 Ident::from_str("ptr"),
                 Ident::from_str("write"),
             ]);
@@ -484,7 +500,7 @@ impl<'a> AllocFnFactory<'a> {
 
     fn layout_ptr(&self) -> P<Ty> {
         let layout = self.cx.path(self.span, vec![
-            self.alloc,
+            self.core,
             Ident::from_str("heap"),
             Ident::from_str("Layout"),
         ]);
@@ -494,7 +510,7 @@ impl<'a> AllocFnFactory<'a> {
 
     fn alloc_err_ptr(&self) -> P<Ty> {
         let err = self.cx.path(self.span, vec![
-            self.alloc,
+            self.core,
             Ident::from_str("heap"),
             Ident::from_str("AllocErr"),
         ]);
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index fa6053d3f6d8e..d8b187135133a 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -760,12 +760,10 @@ impl<K, V> RawTable<K, V> {
         let buffer = Heap.alloc(Layout::from_size_align(size, alignment)
             .ok_or(CollectionAllocErr::CapacityOverflow)?)?;
 
-        let hashes = buffer as *mut HashUint;
-
         Ok(RawTable {
             capacity_mask: capacity.wrapping_sub(1),
             size: 0,
-            hashes: TaggedHashUintPtr::new(hashes),
+            hashes: TaggedHashUintPtr::new(buffer.cast().as_ptr()),
             marker: marker::PhantomData,
         })
     }
@@ -1188,7 +1186,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
         debug_assert!(!oflo, "should be impossible");
         unsafe {
-            Heap.dealloc(self.hashes.ptr() as *mut u8,
+            Heap.dealloc(NonNull::new_unchecked(self.hashes.ptr()).cast(),
                          Layout::from_size_align(size, align).unwrap());
             // Remember how everything was allocated out of one buffer
             // during initialization? We only need one call to free here.
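Putting the `expand.rs` changes together: for a `#[global_allocator]` static, the compiler now emits shims that lean only on `core` (not `alloc`), wrap incoming raw pointers in `NonNull::new_unchecked`, and unwrap results with `as_ptr`. Roughly, as an illustrative hand-expansion (not the exact generated tokens; `MyAlloc`/`MY_ALLOC` are hypothetical user items, with `Alloc` implemented for `&MyAlloc` as in the unstable-book example above):

```rust
// Hypothetical user code:
//     #[global_allocator]
//     static MY_ALLOC: MyAlloc = MyAlloc;
// generates, approximately:

#[no_mangle]
pub unsafe extern fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8 {
    let layout = core::heap::Layout::from_size_align_unchecked(size, align);
    let mut a = &MY_ALLOC;
    match core::heap::Alloc::alloc(&mut a, layout) {
        // New API: success is NonNull<u8>; the C-shaped shim still
        // traffics in raw pointers, so unwrap at the boundary.
        Ok(p) => p.as_ptr(),
        Err(e) => {
            core::ptr::write(err as *mut core::heap::AllocErr, e);
            0 as *mut u8
        }
    }
}

#[no_mangle]
pub unsafe extern fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize) {
    let layout = core::heap::Layout::from_size_align_unchecked(size, align);
    let mut a = &MY_ALLOC;
    // The allocator contract guarantees `ptr` came from a successful
    // alloc, so it is non-null and `new_unchecked` is justified.
    core::heap::Alloc::dealloc(&mut a, core::ptr::NonNull::new_unchecked(ptr), layout)
}
```

The `__rdl_*` default-allocator shims in `src/libstd/heap.rs` below follow the same shape, which is why their diff mirrors these expansion changes one for one.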
diff --git a/src/libstd/heap.rs b/src/libstd/heap.rs
index 4a391372c3a11..d1b3a87235385 100644
--- a/src/libstd/heap.rs
+++ b/src/libstd/heap.rs
@@ -21,7 +21,7 @@ pub use core::heap::*;
 #[allow(unused_attributes)]
 pub mod __default_lib_allocator {
     use super::{System, Layout, Alloc, AllocErr};
-    use ptr;
+    use ptr::{self, NonNull};
 
     // for symbol names src/librustc/middle/allocator.rs
     // for signatures src/librustc_allocator/lib.rs
@@ -36,7 +36,7 @@ pub mod __default_lib_allocator {
                                      err: *mut u8) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
         match System.alloc(layout) {
-            Ok(p) => p,
+            Ok(p) => p.as_ptr(),
             Err(e) => {
                 ptr::write(err as *mut AllocErr, e);
                 0 as *mut u8
@@ -55,7 +55,8 @@ pub mod __default_lib_allocator {
     pub unsafe extern fn __rdl_dealloc(ptr: *mut u8,
                                        size: usize,
                                        align: usize) {
-        System.dealloc(ptr, Layout::from_size_align_unchecked(size, align))
+        System.dealloc(NonNull::new_unchecked(ptr),
+                       Layout::from_size_align_unchecked(size, align))
     }
 
     #[no_mangle]
@@ -78,8 +79,8 @@ pub mod __default_lib_allocator {
                                        err: *mut u8) -> *mut u8 {
         let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.realloc(ptr, old_layout, new_layout) {
-            Ok(p) => p,
+        match System.realloc(NonNull::new_unchecked(ptr), old_layout, new_layout) {
+            Ok(p) => p.as_ptr(),
             Err(e) => {
                 ptr::write(err as *mut AllocErr, e);
                 0 as *mut u8
@@ -94,7 +95,7 @@ pub mod __default_lib_allocator {
                                            err: *mut u8) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
         match System.alloc_zeroed(layout) {
-            Ok(p) => p,
+            Ok(p) => p.as_ptr(),
             Err(e) => {
                 ptr::write(err as *mut AllocErr, e);
                 0 as *mut u8
@@ -112,7 +113,7 @@ pub mod __default_lib_allocator {
         match System.alloc_excess(layout) {
             Ok(p) => {
                 *excess = p.1;
-                p.0
+                p.0.as_ptr()
             }
             Err(e) => {
                 ptr::write(err as *mut AllocErr, e);
@@ -132,10 +133,10 @@ pub mod __default_lib_allocator {
                                              err: *mut u8) -> *mut u8 {
         let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.realloc_excess(ptr, old_layout, new_layout) {
+        match System.realloc_excess(NonNull::new_unchecked(ptr), old_layout, new_layout) {
             Ok(p) => {
                 *excess = p.1;
-                p.0
+                p.0.as_ptr()
             }
             Err(e) => {
                 ptr::write(err as *mut AllocErr, e);
@@ -153,7 +154,7 @@ pub mod __default_lib_allocator {
                                              new_align: usize) -> u8 {
         let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.grow_in_place(ptr, old_layout, new_layout) {
+        match System.grow_in_place(NonNull::new_unchecked(ptr), old_layout, new_layout) {
             Ok(()) => 1,
             Err(_) => 0,
         }
@@ -168,7 +169,7 @@ pub mod __default_lib_allocator {
                                                new_align: usize) -> u8 {
         let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.shrink_in_place(ptr, old_layout, new_layout) {
+        match System.shrink_in_place(NonNull::new_unchecked(ptr), old_layout, new_layout) {
             Ok(()) => 1,
             Err(_) => 0,
         }
diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs
index e18e055654bcb..1a956445b1ce3 100644
--- a/src/libstd/lib.rs
+++ b/src/libstd/lib.rs
@@ -280,6 +280,7 @@
 #![feature(macro_reexport)]
 #![feature(macro_vis_matcher)]
 #![feature(needs_panic_runtime)]
+#![feature(nonnull_cast)]
 #![feature(exhaustive_patterns)]
 #![feature(nonzero)]
 #![feature(num_bits_bytes)]
diff --git a/src/test/run-make-fulldeps/std-core-cycle/bar.rs b/src/test/run-make-fulldeps/std-core-cycle/bar.rs
index 6def5b6f5e181..3d233e581ace7 100644
--- a/src/test/run-make-fulldeps/std-core-cycle/bar.rs
+++ b/src/test/run-make-fulldeps/std-core-cycle/bar.rs
@@ -12,15 +12,16 @@
 #![crate_type = "rlib"]
 
 use std::heap::*;
+use std::ptr::NonNull;
 
 pub struct A;
 
 unsafe impl<'a> Alloc for &'a A {
-    unsafe fn alloc(&mut self, _: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, _: Layout) -> Result<NonNull<u8>, AllocErr> {
         loop {}
     }
 
-    unsafe fn dealloc(&mut self, _ptr: *mut u8, _: Layout) {
+    unsafe fn dealloc(&mut self, _ptr: NonNull<u8>, _: Layout) {
         loop {}
     }
 }
diff --git a/src/test/run-pass/allocator/auxiliary/custom.rs b/src/test/run-pass/allocator/auxiliary/custom.rs
index 8f4fbcd5ab1c1..f028a4154c4b7 100644
--- a/src/test/run-pass/allocator/auxiliary/custom.rs
+++ b/src/test/run-pass/allocator/auxiliary/custom.rs
@@ -14,17 +14,18 @@
 #![crate_type = "rlib"]
 
 use std::heap::{Alloc, System, AllocErr, Layout};
+use std::ptr::NonNull;
 use std::sync::atomic::{AtomicUsize, Ordering};
 
 pub struct A(pub AtomicUsize);
 
 unsafe impl<'a> Alloc for &'a A {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         self.0.fetch_add(1, Ordering::SeqCst);
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         self.0.fetch_add(1, Ordering::SeqCst);
         System.dealloc(ptr, layout)
     }
diff --git a/src/test/run-pass/allocator/custom.rs b/src/test/run-pass/allocator/custom.rs
index 22081678fb999..8be6d05fb7892 100644
--- a/src/test/run-pass/allocator/custom.rs
+++ b/src/test/run-pass/allocator/custom.rs
@@ -16,6 +16,7 @@
 extern crate helper;
 
 use std::heap::{Heap, Alloc, System, Layout, AllocErr};
+use std::ptr::NonNull;
 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
 
 static HITS: AtomicUsize = ATOMIC_USIZE_INIT;
@@ -23,12 +24,12 @@ static HITS: AtomicUsize = ATOMIC_USIZE_INIT;
 struct A;
 
 unsafe impl<'a> Alloc for &'a A {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         HITS.fetch_add(1, Ordering::SeqCst);
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         HITS.fetch_add(1, Ordering::SeqCst);
         System.dealloc(ptr, layout)
     }
diff --git a/src/test/run-pass/realloc-16687.rs b/src/test/run-pass/realloc-16687.rs
index eddcd5a584a5d..4832e35b8f426 100644
--- a/src/test/run-pass/realloc-16687.rs
+++ b/src/test/run-pass/realloc-16687.rs
@@ -16,7 +16,7 @@
 #![feature(heap_api, allocator_api)]
 
 use std::heap::{Heap, Alloc, Layout};
-use std::ptr;
+use std::ptr::{self, NonNull};
 
 fn main() {
     unsafe {
@@ -56,7 +56,7 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?}) = {:?}", layout, ret);
         }
 
-        ret
+        ret.as_ptr()
     }
 
     unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
@@ -64,7 +64,7 @@ unsafe fn test_triangle() -> bool {
             println!("deallocate({:?}, {:?}", ptr, layout);
         }
 
-        Heap.dealloc(ptr, layout);
+        Heap.dealloc(NonNull::new_unchecked(ptr), layout);
     }
 
     unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
@@ -72,14 +72,14 @@ unsafe fn test_triangle() -> bool {
             println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
         }
 
-        let ret = Heap.realloc(ptr, old.clone(), new.clone())
+        let ret = Heap.realloc(NonNull::new_unchecked(ptr), old.clone(), new.clone())
             .unwrap_or_else(|e| Heap.oom(e));
 
         if PRINT {
             println!("reallocate({:?}, old={:?}, new={:?}) = {:?}",
                      ptr, old, new, ret);
         }
-        ret
+        ret.as_ptr()
     }
 
     fn idx_to_size(i: usize) -> usize { (i+1) * 10 }
diff --git a/src/test/run-pass/regions-mock-trans.rs b/src/test/run-pass/regions-mock-trans.rs
index 8f278a315d1af..87924df4f7e2a 100644
--- a/src/test/run-pass/regions-mock-trans.rs
+++ b/src/test/run-pass/regions-mock-trans.rs
@@ -13,6 +13,7 @@
 #![feature(allocator_api)]
 
 use std::heap::{Alloc, Heap, Layout};
+use std::ptr::NonNull;
 
 struct arena(());
 
@@ -33,7 +34,7 @@ fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> {
     unsafe {
         let ptr = Heap.alloc(Layout::new::<Bcx>())
             .unwrap_or_else(|e| Heap.oom(e));
-        &*(ptr as *const _)
+        &*(ptr.as_ptr() as *const _)
     }
 }
 
@@ -45,7 +46,7 @@ fn g(fcx : &Fcx) {
     let bcx = Bcx { fcx: fcx };
     let bcx2 = h(&bcx);
     unsafe {
-        Heap.dealloc(bcx2 as *const _ as *mut _, Layout::new::<Bcx>());
+        Heap.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
     }
 }