diff --git a/alloc/src/borrow.rs b/alloc/src/borrow.rs
index 3aa891f9..aace8412 100644
--- a/alloc/src/borrow.rs
+++ b/alloc/src/borrow.rs
@@ -46,8 +46,8 @@ where A: Allocator
 impl<'alloc, A> Deref for BorrowedPtr<'alloc, A>
 where A: Allocator
     , A: 'alloc {
-    type Target = Address;
-    fn deref(&self) -> &Self::Target { &(*self.ptr) }
+    type Target = Unique<u8>;
+    fn deref(&self) -> &Self::Target { &self.ptr }
 }
 
 impl<'alloc, A> Drop for BorrowedPtr<'alloc, A>
@@ -55,7 +55,7 @@ where A: Allocator
     , A: 'alloc {
     fn drop(&mut self) {
         unsafe {
-            self.allocator.lock().dealloc(*self.ptr, self.layout.clone())
+            self.allocator.lock().dealloc(self.ptr.as_ptr(), self.layout.clone())
         }
     }
 }
@@ -91,14 +91,14 @@ impl<'alloc, A, T> Deref for Borrowed<'alloc, A, T>
 where A: Allocator
     , A: 'alloc {
     type Target = T;
-    fn deref(&self) -> &Self::Target { unsafe { self.value.get() } }
+    fn deref(&self) -> &Self::Target { unsafe { self.value.as_ref() } }
 }
 
 impl<'alloc, A, T> DerefMut for Borrowed<'alloc, A, T>
 where A: Allocator
     , A: 'alloc {
     fn deref_mut(&mut self) -> &mut Self::Target {
-        unsafe { self.value.get_mut() }
+        unsafe { self.value.as_mut() }
     }
 }
 
@@ -107,15 +107,15 @@ where A: Allocator
     , A: 'alloc {
     fn drop(&mut self) {
         use mem::drop;
-        let address = *self.value as Address;
+        let address = self.value.as_ptr() as Address;
         // ensure we drop the object _before_ deallocating it, so that
         // the object's destructor gets run first
         // i hope this is correct...
-        drop(*self.value);
+        drop(self.value.as_ptr());
         unsafe {
             self.allocator.lock()
                 .dealloc( address
-                        , Layout::for_value(self.value.get()))
+                        , Layout::for_value(self.value.as_ref()))
         }
     }
 }
diff --git a/alloc/src/lib.rs b/alloc/src/lib.rs
index fd2a7ff6..87e00118 100644
--- a/alloc/src/lib.rs
+++ b/alloc/src/lib.rs
@@ -637,7 +637,7 @@ pub unsafe trait Allocator {
     /// Captures a common usage pattern for allocators.
     unsafe fn dealloc_one<T>(&mut self, mut ptr: Unique<T>)
     where Self: Sized {
-        let raw_ptr = ptr.get_mut() as *mut T as *mut u8;
+        let raw_ptr = ptr.as_mut() as *mut T as *mut u8;
         self.dealloc(raw_ptr, Layout::new::<T>());
     }
 
@@ -682,7 +682,9 @@ pub unsafe trait Allocator {
                             n_old: usize, n_new: usize) -> Result<Unique<T>, AllocErr>
     where Self: Sized {
-        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new), *ptr) {
+        match ( Layout::array::<T>(n_old)
+              , Layout::array::<T>(n_new)
+              , ptr.as_ptr()) {
             (Some(ref k_old), Some(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => {
                 self.realloc(ptr as *mut u8, k_old.clone(), k_new.clone())
                     .map(|p|Unique::new(p as *mut T))
             }
@@ -698,7 +700,7 @@ pub unsafe trait Allocator {
     /// Captures a common usage pattern for allocators.
     unsafe fn dealloc_array<T>(&mut self, ptr: Unique<T>, n: usize) -> Result<(), AllocErr>
     where Self: Sized {
-        let raw_ptr = *ptr as *mut u8;
+        let raw_ptr = ptr.as_ptr() as *mut u8;
         match Layout::array::<T>(n) {
             Some(ref k) if k.size() > 0 => {
                 Ok(self.dealloc(raw_ptr, k.clone()))
@@ -804,9 +806,9 @@ pub unsafe trait Allocator {
                             n_old: usize,
                             n_new: usize) -> Option<Unique<T>>
     where Self: Sized {
-        let (k_old, k_new, ptr) = (Layout::array_unchecked::<T>(n_old),
-                                   Layout::array_unchecked::<T>(n_new),
-                                   *ptr);
+        let (k_old, k_new, ptr) = ( Layout::array_unchecked::<T>(n_old)
+                                  , Layout::array_unchecked::<T>(n_new)
+                                  , ptr.as_ptr());
         self.realloc_unchecked(ptr as *mut u8, k_old, k_new)
             .map(|p|Unique::new(*p as *mut T))
     }
@@ -821,7 +823,7 @@ pub unsafe trait Allocator {
     unsafe fn dealloc_array_unchecked<T>(&mut self, ptr: Unique<T>, n: usize)
     where Self: Sized {
         let layout = Layout::array_unchecked::<T>(n);
-        self.dealloc(*ptr as *mut u8, layout);
+        self.dealloc(ptr.as_ptr() as *mut u8, layout);
     }
 }
 
diff --git a/paging/src/arch/x86_64/mod.rs b/paging/src/arch/x86_64/mod.rs
index 565b0bc2..0f385bcf 100644
--- a/paging/src/arch/x86_64/mod.rs
+++ b/paging/src/arch/x86_64/mod.rs
@@ -251,11 +251,11 @@ impl ActivePML4 {
     }
 
     fn pml4(&self) -> &Table {
-        unsafe { self.0.get() }
+        unsafe { self.0.as_ref() }
     }
 
     fn pml4_mut(&mut self) -> &mut Table {
-        unsafe { self.0.get_mut() }
+        unsafe { self.0.as_mut() }
     }
 
     /// Returns true if the given page is mapped.
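
Note on the pattern applied throughout this diff: the old `Unique` accessors (`get()`, `get_mut()`, and the `Deref`-based `*ptr`, which yielded the raw pointer) are replaced with the newer `as_ref()`/`as_mut()`/`as_ptr()` methods. Below is a minimal standalone sketch of the same calls, written against stable `core::ptr::NonNull` (which shares this accessor surface) since `Unique` itself is nightly-only; the `demo` function and its local names are illustrative, not part of this codebase:

use core::ptr::NonNull;

fn demo() {
    let mut value = 42u32;
    // A non-null raw-pointer wrapper, analogous to Unique in the diff.
    let mut ptr: NonNull<u32> = NonNull::from(&mut value);

    unsafe {
        // was: ptr.get() -- shared reference to the pointee
        let shared: &u32 = ptr.as_ref();
        assert_eq!(*shared, 42);

        // was: ptr.get_mut() -- mutable reference to the pointee
        let exclusive: &mut u32 = ptr.as_mut();
        *exclusive += 1;
    }

    // was: *ptr -- the raw pointer itself, e.g. for dealloc() calls
    let raw: *mut u32 = ptr.as_ptr();
    assert!(!raw.is_null());
}

fn main() { demo(); }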