From 07aac819f6735b74f66a6672bc6947d5a4f90237 Mon Sep 17 00:00:00 2001
From: David Tolnay <dtolnay@gmail.com>
Date: Wed, 20 Dec 2023 16:27:28 -0800
Subject: [PATCH] Fill in unsafe blocks inside unsafe functions

---
 src/error.rs | 117 +++++++++++++++++++++++++++------------------
 src/fmt.rs   |   9 ++--
 src/ptr.rs   |   8 ++--
 3 files changed, 70 insertions(+), 64 deletions(-)

diff --git a/src/error.rs b/src/error.rs
index 01402d4..1817646 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -610,8 +610,8 @@ struct ErrorVTable {
 unsafe fn object_drop<E>(e: Own<ErrorImpl>) {
     // Cast back to ErrorImpl<E> so that the allocator receives the correct
     // Layout to deallocate the Box's memory.
-    let unerased = e.cast::<ErrorImpl<E>>().boxed();
-    drop(unerased);
+    let unerased_own = e.cast::<ErrorImpl<E>>();
+    drop(unsafe { unerased_own.boxed() });
 }
 
 // Safety: requires layout of *e to match ErrorImpl<E>.
@@ -620,8 +620,8 @@ unsafe fn object_drop_front<E>(e: Own<ErrorImpl>, target: TypeId) {
     // without dropping E itself. This is used by downcast after doing a
     // ptr::read to take ownership of the E.
     let _ = target;
-    let unerased = e.cast::<ErrorImpl<ManuallyDrop<E>>>().boxed();
-    drop(unerased);
+    let unerased_own = e.cast::<ErrorImpl<ManuallyDrop<E>>>();
+    drop(unsafe { unerased_own.boxed() });
 }
 
 // Safety: requires layout of *e to match ErrorImpl<E>.
@@ -631,15 +631,15 @@ where
 {
     // Attach E's native StdError vtable onto a pointer to self._object.
-    let unerased = e.cast::<ErrorImpl<E>>();
+    let unerased_ref = e.cast::<ErrorImpl<E>>();
 
     #[cfg(not(anyhow_no_ptr_addr_of))]
-    return Ref::from_raw(NonNull::new_unchecked(
-        ptr::addr_of!((*unerased.as_ptr())._object) as *mut E,
-    ));
+    return Ref::from_raw(unsafe {
+        NonNull::new_unchecked(ptr::addr_of!((*unerased_ref.as_ptr())._object) as *mut E)
+    });
 
     #[cfg(anyhow_no_ptr_addr_of)]
-    return Ref::new(&unerased.deref()._object);
+    return Ref::new(unsafe { &unerased_ref.deref()._object });
 }
 
 // Safety: requires layout of *e to match ErrorImpl<E>, and for `e` to be derived
@@ -650,7 +650,8 @@ where
     E: StdError + Send + Sync + 'static,
 {
     // Attach E's native StdError vtable onto a pointer to self._object.
-    &mut e.cast::<ErrorImpl<E>>().deref_mut()._object
+    let unerased_mut = e.cast::<ErrorImpl<E>>();
+    unsafe { &mut unerased_mut.deref_mut()._object }
 }
 
 // Safety: requires layout of *e to match ErrorImpl<E>.
@@ -659,7 +660,8 @@ where
     E: StdError + Send + Sync + 'static,
 {
     // Attach ErrorImpl<E>'s native StdError vtable. The StdError impl is below.
-    e.cast::<ErrorImpl<E>>().boxed()
+    let unerased_own = e.cast::<ErrorImpl<E>>();
+    unsafe { unerased_own.boxed() }
 }
 
 // Safety: requires layout of *e to match ErrorImpl<E>.
@@ -671,18 +673,18 @@ where
         // Caller is looking for an E pointer and e is ErrorImpl<E>, take a
         // pointer to its E field.
 
-        let unerased = e.cast::<ErrorImpl<E>>();
+        let unerased_ref = e.cast::<ErrorImpl<E>>();
 
         #[cfg(not(anyhow_no_ptr_addr_of))]
         return Some(
-            Ref::from_raw(NonNull::new_unchecked(
-                ptr::addr_of!((*unerased.as_ptr())._object) as *mut E,
-            ))
+            Ref::from_raw(unsafe {
+                NonNull::new_unchecked(ptr::addr_of!((*unerased_ref.as_ptr())._object) as *mut E)
+            })
             .cast::<()>(),
         );
 
         #[cfg(anyhow_no_ptr_addr_of)]
-        return Some(Ref::new(&unerased.deref()._object).cast::<()>());
+        return Some(Ref::new(unsafe { &unerased_ref.deref()._object }).cast::<()>());
     } else {
         None
     }
@@ -697,7 +699,8 @@ where
     if TypeId::of::<E>() == target {
         // Caller is looking for an E pointer and e is ErrorImpl<E>, take a
         // pointer to its E field.
-        let unerased = e.cast::<ErrorImpl<E>>().deref_mut();
+        let unerased_mut = e.cast::<ErrorImpl<E>>();
+        let unerased = unsafe { unerased_mut.deref_mut() };
         Some(Mut::new(&mut unerased._object).cast::<()>())
     } else {
         None
@@ -718,10 +721,12 @@ where
     E: 'static,
 {
     if TypeId::of::<C>() == target {
-        let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref();
+        let unerased_ref = e.cast::<ErrorImpl<ContextError<C, E>>>();
+        let unerased = unsafe { unerased_ref.deref() };
         Some(Ref::new(&unerased._object.context).cast::<()>())
     } else if TypeId::of::<E>() == target {
-        let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref();
+        let unerased_ref = e.cast::<ErrorImpl<ContextError<C, E>>>();
+        let unerased = unsafe { unerased_ref.deref() };
         Some(Ref::new(&unerased._object.error).cast::<()>())
     } else {
         None
@@ -736,10 +741,12 @@ where
     E: 'static,
 {
     if TypeId::of::<C>() == target {
-        let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref_mut();
+        let unerased_mut = e.cast::<ErrorImpl<ContextError<C, E>>>();
+        let unerased = unsafe { unerased_mut.deref_mut() };
         Some(Mut::new(&mut unerased._object.context).cast::<()>())
     } else if TypeId::of::<E>() == target {
-        let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref_mut();
+        let unerased_mut = e.cast::<ErrorImpl<ContextError<C, E>>>();
+        let unerased = unsafe { unerased_mut.deref_mut() };
         Some(Mut::new(&mut unerased._object.error).cast::<()>())
     } else {
         None
@@ -756,15 +763,11 @@ where
     // Called after downcasting by value to either the C or the E and doing a
     // ptr::read to take ownership of that value.
     if TypeId::of::<C>() == target {
-        let unerased = e
-            .cast::<ErrorImpl<ContextError<ManuallyDrop<C>, E>>>()
-            .boxed();
-        drop(unerased);
+        let unerased_own = e.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, E>>>();
+        drop(unsafe { unerased_own.boxed() });
     } else {
-        let unerased = e
-            .cast::<ErrorImpl<ContextError<C, ManuallyDrop<E>>>>()
-            .boxed();
-        drop(unerased);
+        let unerased_own = e.cast::<ErrorImpl<ContextError<C, ManuallyDrop<E>>>>();
+        drop(unsafe { unerased_own.boxed() });
     }
 }
 
@@ -773,13 +776,14 @@ unsafe fn context_chain_downcast<C>(e: Ref<ErrorImpl>, target: TypeId) -> Option<Ref<()>>
 where
     C: 'static,
 {
-    let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref();
+    let unerased_ref = e.cast::<ErrorImpl<ContextError<C, Error>>>();
+    let unerased = unsafe { unerased_ref.deref() };
     if TypeId::of::<C>() == target {
         Some(Ref::new(&unerased._object.context).cast::<()>())
     } else {
         // Recurse down the context chain per the inner error's vtable.
         let source = &unerased._object.error;
-        (vtable(source.inner.ptr).object_downcast)(source.inner.by_ref(), target)
+        unsafe { (vtable(source.inner.ptr).object_downcast)(source.inner.by_ref(), target) }
     }
 }
 
@@ -789,13 +793,14 @@ unsafe fn context_chain_downcast_mut<C>(e: Mut<ErrorImpl>, target: TypeId) -> Option<Mut<()>>
 where
     C: 'static,
 {
-    let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref_mut();
+    let unerased_mut = e.cast::<ErrorImpl<ContextError<C, Error>>>();
+    let unerased = unsafe { unerased_mut.deref_mut() };
     if TypeId::of::<C>() == target {
         Some(Mut::new(&mut unerased._object.context).cast::<()>())
     } else {
         // Recurse down the context chain per the inner error's vtable.
         let source = &mut unerased._object.error;
-        (vtable(source.inner.ptr).object_downcast_mut)(source.inner.by_mut(), target)
+        unsafe { (vtable(source.inner.ptr).object_downcast_mut)(source.inner.by_mut(), target) }
     }
 }
 
@@ -807,21 +812,18 @@ where
     // Called after downcasting by value to either the C or one of the causes
     // and doing a ptr::read to take ownership of that value.
     if TypeId::of::<C>() == target {
-        let unerased = e
-            .cast::<ErrorImpl<ContextError<ManuallyDrop<C>, Error>>>()
-            .boxed();
+        let unerased_own = e.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, Error>>>();
         // Drop the entire rest of the data structure rooted in the next Error.
-        drop(unerased);
+        drop(unsafe { unerased_own.boxed() });
     } else {
-        let unerased = e
-            .cast::<ErrorImpl<ContextError<C, ManuallyDrop<Error>>>>()
-            .boxed();
+        let unerased_own = e.cast::<ErrorImpl<ContextError<C, ManuallyDrop<Error>>>>();
+        let unerased = unsafe { unerased_own.boxed() };
         // Read the Own<ErrorImpl> from the next error.
         let inner = unerased._object.error.inner;
         drop(unerased);
-        let vtable = vtable(inner.ptr);
+        let vtable = unsafe { vtable(inner.ptr) };
         // Recursively drop the next error using the same target typeid.
-        (vtable.object_drop_rest)(inner, target);
+        unsafe { (vtable.object_drop_rest)(inner, target) };
     }
 }
 
@@ -832,8 +834,9 @@ unsafe fn context_backtrace<C>(e: Ref<ErrorImpl>) -> Option<&Backtrace>
 where
     C: 'static,
 {
-    let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref();
-    let backtrace = ErrorImpl::backtrace(unerased._object.error.inner.by_ref());
+    let unerased_ref = e.cast::<ErrorImpl<ContextError<C, Error>>>();
+    let unerased = unsafe { unerased_ref.deref() };
+    let backtrace = unsafe { ErrorImpl::backtrace(unerased._object.error.inner.by_ref()) };
     Some(backtrace)
 }
 
@@ -853,7 +856,7 @@ pub(crate) struct ErrorImpl<E = ()> {
 // avoids converting `p` into a reference.
 unsafe fn vtable(p: NonNull<ErrorImpl>) -> &'static ErrorVTable {
     // NOTE: This assumes that `ErrorVTable` is the first field of ErrorImpl.
-    *(p.as_ptr() as *const &'static ErrorVTable)
+    unsafe { *(p.as_ptr() as *const &'static ErrorVTable) }
 }
 
 // repr C to ensure that ContextError<C, E> has the same layout as
@@ -877,7 +880,7 @@ impl ErrorImpl {
     pub(crate) unsafe fn error(this: Ref<Self>) -> &(dyn StdError + Send + Sync + 'static) {
         // Use vtable to attach E's native StdError vtable for the right
         // original type E.
-        (vtable(this.ptr).object_ref)(this).deref()
+        unsafe { (vtable(this.ptr).object_ref)(this).deref() }
     }
 
     #[cfg(feature = "std")]
@@ -886,12 +889,14 @@ impl ErrorImpl {
         // Use vtable to attach E's native StdError vtable for the right
         // original type E.
         #[cfg(not(anyhow_no_ptr_addr_of))]
-        return (vtable(this.ptr).object_ref)(this.by_ref())
-            .by_mut()
-            .deref_mut();
+        return unsafe {
+            (vtable(this.ptr).object_ref)(this.by_ref())
+                .by_mut()
+                .deref_mut()
+        };
 
         #[cfg(anyhow_no_ptr_addr_of)]
-        return (vtable(this.ptr).object_mut)(this);
+        return unsafe { (vtable(this.ptr).object_mut)(this) };
     }
 
     #[cfg(any(backtrace, feature = "backtrace"))]
@@ -899,29 +904,29 @@ impl ErrorImpl {
         // This unwrap can only panic if the underlying error's backtrace method
         // is nondeterministic, which would only happen in maliciously
         // constructed code.
-        this.deref()
+        unsafe { this.deref() }
             .backtrace
             .as_ref()
            .or_else(|| {
                 #[cfg(backtrace)]
-                return error::request_ref::<Backtrace>(Self::error(this));
+                return error::request_ref::<Backtrace>(unsafe { Self::error(this) });
 
                 #[cfg(not(backtrace))]
-                return (vtable(this.ptr).object_backtrace)(this);
+                return unsafe { (vtable(this.ptr).object_backtrace)(this) };
             })
             .expect("backtrace capture failed")
     }
 
     #[cfg(backtrace)]
     unsafe fn provide<'a>(this: Ref<'a, Self>, request: &mut Request<'a>) {
-        if let Some(backtrace) = &this.deref().backtrace {
+        if let Some(backtrace) = unsafe { &this.deref().backtrace } {
             request.provide_ref(backtrace);
         }
-        Self::error(this).provide(request);
+        unsafe { Self::error(this) }.provide(request);
     }
 
     #[cold]
     pub(crate) unsafe fn chain(this: Ref<Self>) -> Chain {
-        Chain::new(Self::error(this))
+        Chain::new(unsafe { Self::error(this) })
     }
 }
diff --git a/src/fmt.rs b/src/fmt.rs
index 03d8fd3..45209f6 100644
--- a/src/fmt.rs
+++ b/src/fmt.rs
@@ -5,10 +5,11 @@ use core::fmt::{self, Debug, Write};
 
 impl ErrorImpl {
     pub(crate) unsafe fn display(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}", Self::error(this))?;
+        write!(f, "{}", unsafe { Self::error(this) })?;
         if f.alternate() {
-            for cause in Self::chain(this).skip(1) {
+            let chain = unsafe { Self::chain(this) };
+            for cause in chain.skip(1) {
                 write!(f, ": {}", cause)?;
             }
         }
@@ -17,7 +18,7 @@ impl ErrorImpl {
     }
 
     pub(crate) unsafe fn debug(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
-        let error = Self::error(this);
+        let error = unsafe { Self::error(this) };
 
         if f.alternate() {
             return Debug::fmt(error, f);
@@ -43,7 +44,7 @@ impl ErrorImpl {
     {
         use crate::backtrace::BacktraceStatus;
 
-        let backtrace = Self::backtrace(this);
+        let backtrace = unsafe { Self::backtrace(this) };
         if let BacktraceStatus::Captured = backtrace.status() {
             let mut backtrace = backtrace.to_string();
             write!(f, "\n\n")?;
diff --git a/src/ptr.rs b/src/ptr.rs
index c7fe488..a1b7126 100644
--- a/src/ptr.rs
+++ b/src/ptr.rs
@@ -42,7 +42,7 @@ where
     }
 
     pub unsafe fn boxed(self) -> Box<T> {
-        Box::from_raw(self.ptr.as_ptr())
+        unsafe { Box::from_raw(self.ptr.as_ptr()) }
     }
 
     pub fn by_ref(&self) -> Ref<T> {
@@ -120,7 +120,7 @@ where
     }
 
     pub unsafe fn deref(self) -> &'a T {
-        &*self.ptr.as_ptr()
+        unsafe { &*self.ptr.as_ptr() }
     }
 }
 
@@ -179,13 +179,13 @@ where
     }
 
     pub unsafe fn deref_mut(self) -> &'a mut T {
-        &mut *self.ptr.as_ptr()
+        unsafe { &mut *self.ptr.as_ptr() }
     }
 }
 
 impl<'a, T> Mut<'a, T> {
     pub unsafe fn read(self) -> T {
-        self.ptr.as_ptr().read()
+        unsafe { self.ptr.as_ptr().read() }
     }
 }