
Merge pull request #329 from dtolnay/unsafeop
Turn on deny(unsafe_op_in_unsafe_fn)
dtolnay authored Dec 21, 2023
2 parents afb298e + 07aac81 commit d371a49
Showing 5 changed files with 79 additions and 64 deletions.
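The lint enabled by this commit, unsafe_op_in_unsafe_fn (tracking issue rust-lang/rust#71668), removes the implicit unsafe context from the body of an unsafe fn: once it is denied, every unsafe operation inside such a function needs its own unsafe { } block, exactly as in safe code. A minimal sketch of what the lint requires, illustrative only and not taken from this diff:

#![deny(unsafe_op_in_unsafe_fn)]

// Safety: `p` must be non-null, aligned, and point to a readable u8.
unsafe fn read_first(p: *const u8) -> u8 {
    // Under the lint, the raw-pointer deref needs an explicit unsafe block
    // even though we are already inside an `unsafe fn`.
    unsafe { *p }
}

fn main() {
    let byte = 42u8;
    // Calling an unsafe fn still requires an unsafe block at the call site.
    assert_eq!(unsafe { read_first(&byte) }, 42);
}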
4 changes: 4 additions & 0 deletions build.rs
@@ -68,6 +68,10 @@ fn main() {
// core::fmt::Arguments::as_str
// https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html#stabilized-apis
println!("cargo:rustc-cfg=anyhow_no_fmt_arguments_as_str");

// #![deny(unsafe_op_in_unsafe_fn)]
// https://github.com/rust-lang/rust/issues/71668
println!("cargo:rustc-cfg=anyhow_no_unsafe_op_in_unsafe_fn_lint");
}
}

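The build.rs hunk above emits a cargo:rustc-cfg flag so that lib.rs can fall back gracefully on compilers that predate the lint. A rough sketch of how such a probe is commonly written; the helper, the parsing, and the 1.52 cutoff are assumptions for illustration, not taken from this crate's actual build script:

use std::env;
use std::process::Command;
use std::str;

// Ask the active rustc for its version and pull out the minor component,
// e.g. "rustc 1.52.0 (...)" -> Some(52).
fn rustc_minor_version() -> Option<u32> {
    let rustc = env::var_os("RUSTC")?;
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = str::from_utf8(&output.stdout).ok()?;
    version.split('.').nth(1)?.parse().ok()
}

fn main() {
    // Hypothetical cutoff: suppose the lint first shipped in 1.52.
    if rustc_minor_version().map_or(true, |minor| minor < 52) {
        println!("cargo:rustc-cfg=anyhow_no_unsafe_op_in_unsafe_fn_lint");
    }
}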
117 changes: 61 additions & 56 deletions src/error.rs
@@ -610,8 +610,8 @@ struct ErrorVTable {
unsafe fn object_drop<E>(e: Own<ErrorImpl>) {
// Cast back to ErrorImpl<E> so that the allocator receives the correct
// Layout to deallocate the Box's memory.
let unerased = e.cast::<ErrorImpl<E>>().boxed();
drop(unerased);
let unerased_own = e.cast::<ErrorImpl<E>>();
drop(unsafe { unerased_own.boxed() });
}

// Safety: requires layout of *e to match ErrorImpl<E>.
@@ -620,8 +620,8 @@ unsafe fn object_drop_front<E>(e: Own<ErrorImpl>, target: TypeId) {
// without dropping E itself. This is used by downcast after doing a
// ptr::read to take ownership of the E.
let _ = target;
let unerased = e.cast::<ErrorImpl<ManuallyDrop<E>>>().boxed();
drop(unerased);
let unerased_own = e.cast::<ErrorImpl<ManuallyDrop<E>>>();
drop(unsafe { unerased_own.boxed() });
}

// Safety: requires layout of *e to match ErrorImpl<E>.
@@ -631,15 +631,15 @@ where
{
// Attach E's native StdError vtable onto a pointer to self._object.

let unerased = e.cast::<ErrorImpl<E>>();
let unerased_ref = e.cast::<ErrorImpl<E>>();

#[cfg(not(anyhow_no_ptr_addr_of))]
return Ref::from_raw(NonNull::new_unchecked(
ptr::addr_of!((*unerased.as_ptr())._object) as *mut E,
));
return Ref::from_raw(unsafe {
NonNull::new_unchecked(ptr::addr_of!((*unerased_ref.as_ptr())._object) as *mut E)
});

#[cfg(anyhow_no_ptr_addr_of)]
return Ref::new(&unerased.deref()._object);
return Ref::new(unsafe { &unerased_ref.deref()._object });
}

// Safety: requires layout of *e to match ErrorImpl<E>, and for `e` to be derived
@@ -650,7 +650,8 @@ where
E: StdError + Send + Sync + 'static,
{
// Attach E's native StdError vtable onto a pointer to self._object.
&mut e.cast::<ErrorImpl<E>>().deref_mut()._object
let unerased_mut = e.cast::<ErrorImpl<E>>();
unsafe { &mut unerased_mut.deref_mut()._object }
}

// Safety: requires layout of *e to match ErrorImpl<E>.
@@ -659,7 +660,8 @@ where
E: StdError + Send + Sync + 'static,
{
// Attach ErrorImpl<E>'s native StdError vtable. The StdError impl is below.
e.cast::<ErrorImpl<E>>().boxed()
let unerased_own = e.cast::<ErrorImpl<E>>();
unsafe { unerased_own.boxed() }
}

// Safety: requires layout of *e to match ErrorImpl<E>.
@@ -671,18 +673,18 @@ where
// Caller is looking for an E pointer and e is ErrorImpl<E>, take a
// pointer to its E field.

let unerased = e.cast::<ErrorImpl<E>>();
let unerased_ref = e.cast::<ErrorImpl<E>>();

#[cfg(not(anyhow_no_ptr_addr_of))]
return Some(
Ref::from_raw(NonNull::new_unchecked(
ptr::addr_of!((*unerased.as_ptr())._object) as *mut E,
))
Ref::from_raw(unsafe {
NonNull::new_unchecked(ptr::addr_of!((*unerased_ref.as_ptr())._object) as *mut E)
})
.cast::<()>(),
);

#[cfg(anyhow_no_ptr_addr_of)]
return Some(Ref::new(&unerased.deref()._object).cast::<()>());
return Some(Ref::new(unsafe { &unerased_ref.deref()._object }).cast::<()>());
} else {
None
}
@@ -697,7 +699,8 @@ where
if TypeId::of::<E>() == target {
// Caller is looking for an E pointer and e is ErrorImpl<E>, take a
// pointer to its E field.
let unerased = e.cast::<ErrorImpl<E>>().deref_mut();
let unerased_mut = e.cast::<ErrorImpl<E>>();
let unerased = unsafe { unerased_mut.deref_mut() };
Some(Mut::new(&mut unerased._object).cast::<()>())
} else {
None
@@ -718,10 +721,12 @@ where
E: 'static,
{
if TypeId::of::<C>() == target {
let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref();
let unerased_ref = e.cast::<ErrorImpl<ContextError<C, E>>>();
let unerased = unsafe { unerased_ref.deref() };
Some(Ref::new(&unerased._object.context).cast::<()>())
} else if TypeId::of::<E>() == target {
let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref();
let unerased_ref = e.cast::<ErrorImpl<ContextError<C, E>>>();
let unerased = unsafe { unerased_ref.deref() };
Some(Ref::new(&unerased._object.error).cast::<()>())
} else {
None
@@ -736,10 +741,12 @@ where
E: 'static,
{
if TypeId::of::<C>() == target {
let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref_mut();
let unerased_mut = e.cast::<ErrorImpl<ContextError<C, E>>>();
let unerased = unsafe { unerased_mut.deref_mut() };
Some(Mut::new(&mut unerased._object.context).cast::<()>())
} else if TypeId::of::<E>() == target {
let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref_mut();
let unerased_mut = e.cast::<ErrorImpl<ContextError<C, E>>>();
let unerased = unsafe { unerased_mut.deref_mut() };
Some(Mut::new(&mut unerased._object.error).cast::<()>())
} else {
None
@@ -756,15 +763,11 @@ where
// Called after downcasting by value to either the C or the E and doing a
// ptr::read to take ownership of that value.
if TypeId::of::<C>() == target {
let unerased = e
.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, E>>>()
.boxed();
drop(unerased);
let unerased_own = e.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, E>>>();
drop(unsafe { unerased_own.boxed() });
} else {
let unerased = e
.cast::<ErrorImpl<ContextError<C, ManuallyDrop<E>>>>()
.boxed();
drop(unerased);
let unerased_own = e.cast::<ErrorImpl<ContextError<C, ManuallyDrop<E>>>>();
drop(unsafe { unerased_own.boxed() });
}
}

@@ -773,13 +776,14 @@ unsafe fn context_chain_downcast<C>(e: Ref<ErrorImpl>, target: TypeId) -> Option
where
C: 'static,
{
let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref();
let unerased_ref = e.cast::<ErrorImpl<ContextError<C, Error>>>();
let unerased = unsafe { unerased_ref.deref() };
if TypeId::of::<C>() == target {
Some(Ref::new(&unerased._object.context).cast::<()>())
} else {
// Recurse down the context chain per the inner error's vtable.
let source = &unerased._object.error;
(vtable(source.inner.ptr).object_downcast)(source.inner.by_ref(), target)
unsafe { (vtable(source.inner.ptr).object_downcast)(source.inner.by_ref(), target) }
}
}

@@ -789,13 +793,14 @@ unsafe fn context_chain_downcast_mut<C>(e: Mut<ErrorImpl>, target: TypeId) -> Op
where
C: 'static,
{
let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref_mut();
let unerased_mut = e.cast::<ErrorImpl<ContextError<C, Error>>>();
let unerased = unsafe { unerased_mut.deref_mut() };
if TypeId::of::<C>() == target {
Some(Mut::new(&mut unerased._object.context).cast::<()>())
} else {
// Recurse down the context chain per the inner error's vtable.
let source = &mut unerased._object.error;
(vtable(source.inner.ptr).object_downcast_mut)(source.inner.by_mut(), target)
unsafe { (vtable(source.inner.ptr).object_downcast_mut)(source.inner.by_mut(), target) }
}
}

@@ -807,21 +812,18 @@ where
// Called after downcasting by value to either the C or one of the causes
// and doing a ptr::read to take ownership of that value.
if TypeId::of::<C>() == target {
let unerased = e
.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, Error>>>()
.boxed();
let unerased_own = e.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, Error>>>();
// Drop the entire rest of the data structure rooted in the next Error.
drop(unerased);
drop(unsafe { unerased_own.boxed() });
} else {
let unerased = e
.cast::<ErrorImpl<ContextError<C, ManuallyDrop<Error>>>>()
.boxed();
let unerased_own = e.cast::<ErrorImpl<ContextError<C, ManuallyDrop<Error>>>>();
let unerased = unsafe { unerased_own.boxed() };
// Read the Own<ErrorImpl> from the next error.
let inner = unerased._object.error.inner;
drop(unerased);
let vtable = vtable(inner.ptr);
let vtable = unsafe { vtable(inner.ptr) };
// Recursively drop the next error using the same target typeid.
(vtable.object_drop_rest)(inner, target);
unsafe { (vtable.object_drop_rest)(inner, target) };
}
}

@@ -832,8 +834,9 @@ unsafe fn context_backtrace<C>(e: Ref<ErrorImpl>) -> Option<&Backtrace>
where
C: 'static,
{
let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref();
let backtrace = ErrorImpl::backtrace(unerased._object.error.inner.by_ref());
let unerased_ref = e.cast::<ErrorImpl<ContextError<C, Error>>>();
let unerased = unsafe { unerased_ref.deref() };
let backtrace = unsafe { ErrorImpl::backtrace(unerased._object.error.inner.by_ref()) };
Some(backtrace)
}

@@ -853,7 +856,7 @@ pub(crate) struct ErrorImpl<E = ()> {
// avoids converting `p` into a reference.
unsafe fn vtable(p: NonNull<ErrorImpl>) -> &'static ErrorVTable {
// NOTE: This assumes that `ErrorVTable` is the first field of ErrorImpl.
*(p.as_ptr() as *const &'static ErrorVTable)
unsafe { *(p.as_ptr() as *const &'static ErrorVTable) }
}

// repr C to ensure that ContextError<C, E> has the same layout as
@@ -877,7 +880,7 @@ impl ErrorImpl {
pub(crate) unsafe fn error(this: Ref<Self>) -> &(dyn StdError + Send + Sync + 'static) {
// Use vtable to attach E's native StdError vtable for the right
// original type E.
(vtable(this.ptr).object_ref)(this).deref()
unsafe { (vtable(this.ptr).object_ref)(this).deref() }
}

#[cfg(feature = "std")]
@@ -886,42 +889,44 @@ impl ErrorImpl {
// original type E.

#[cfg(not(anyhow_no_ptr_addr_of))]
return (vtable(this.ptr).object_ref)(this.by_ref())
.by_mut()
.deref_mut();
return unsafe {
(vtable(this.ptr).object_ref)(this.by_ref())
.by_mut()
.deref_mut()
};

#[cfg(anyhow_no_ptr_addr_of)]
return (vtable(this.ptr).object_mut)(this);
return unsafe { (vtable(this.ptr).object_mut)(this) };
}

#[cfg(any(backtrace, feature = "backtrace"))]
pub(crate) unsafe fn backtrace(this: Ref<Self>) -> &Backtrace {
// This unwrap can only panic if the underlying error's backtrace method
// is nondeterministic, which would only happen in maliciously
// constructed code.
this.deref()
unsafe { this.deref() }
.backtrace
.as_ref()
.or_else(|| {
#[cfg(backtrace)]
return error::request_ref::<Backtrace>(Self::error(this));
return error::request_ref::<Backtrace>(unsafe { Self::error(this) });
#[cfg(not(backtrace))]
return (vtable(this.ptr).object_backtrace)(this);
return unsafe { (vtable(this.ptr).object_backtrace)(this) };
})
.expect("backtrace capture failed")
}

#[cfg(backtrace)]
unsafe fn provide<'a>(this: Ref<'a, Self>, request: &mut Request<'a>) {
if let Some(backtrace) = &this.deref().backtrace {
if let Some(backtrace) = unsafe { &this.deref().backtrace } {
request.provide_ref(backtrace);
}
Self::error(this).provide(request);
unsafe { Self::error(this) }.provide(request);
}

#[cold]
pub(crate) unsafe fn chain(this: Ref<Self>) -> Chain {
Chain::new(Self::error(this))
Chain::new(unsafe { Self::error(this) })
}
}

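Every change in src/error.rs above follows the same mechanical pattern: a chained expression such as e.cast::<ErrorImpl<E>>().boxed() is split into a safe cast bound to a local, followed by the genuinely unsafe call wrapped in its own unsafe { } block, keeping each block as narrow as possible. A simplified, self-contained sketch of that pattern; Own here is a stand-in for the wrapper defined in src/ptr.rs, not its real definition:

#![deny(unsafe_op_in_unsafe_fn)]

use std::ptr::NonNull;

// Simplified stand-in for the Own<T> pointer wrapper in src/ptr.rs.
struct Own<T> {
    ptr: NonNull<T>,
}

impl<T> Own<T> {
    fn new(boxed: Box<T>) -> Self {
        Own {
            ptr: unsafe { NonNull::new_unchecked(Box::into_raw(boxed)) },
        }
    }

    // Safety: the pointer must come from Box::into_raw and not have been freed.
    unsafe fn boxed(self) -> Box<T> {
        unsafe { Box::from_raw(self.ptr.as_ptr()) }
    }
}

// Safety: caller must own the allocation behind `e`.
unsafe fn object_drop(e: Own<String>) {
    // Before this commit the whole fn body was an implicit unsafe block;
    // now only the call that is actually unsafe is wrapped.
    drop(unsafe { e.boxed() });
}

fn main() {
    let own = Own::new(Box::new(String::from("x")));
    unsafe { object_drop(own) };
}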
9 changes: 5 additions & 4 deletions src/fmt.rs
@@ -5,10 +5,11 @@ use core::fmt::{self, Debug, Write};

impl ErrorImpl {
pub(crate) unsafe fn display(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", Self::error(this))?;
write!(f, "{}", unsafe { Self::error(this) })?;

if f.alternate() {
for cause in Self::chain(this).skip(1) {
let chain = unsafe { Self::chain(this) };
for cause in chain.skip(1) {
write!(f, ": {}", cause)?;
}
}
@@ -17,7 +18,7 @@ impl ErrorImpl {
}

pub(crate) unsafe fn debug(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
let error = Self::error(this);
let error = unsafe { Self::error(this) };

if f.alternate() {
return Debug::fmt(error, f);
@@ -43,7 +44,7 @@ impl ErrorImpl {
{
use crate::backtrace::BacktraceStatus;

let backtrace = Self::backtrace(this);
let backtrace = unsafe { Self::backtrace(this) };
if let BacktraceStatus::Captured = backtrace.status() {
let mut backtrace = backtrace.to_string();
write!(f, "\n\n")?;
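The display change in src/fmt.rs shows a second flavor of the same cleanup: Self::chain(this) is hoisted into a let binding before the loop so that only the unsafe call sits inside an unsafe block and the iteration itself stays ordinary safe code. A small sketch of that shape; the function and names are made up for illustration, not anyhow's API:

#![deny(unsafe_op_in_unsafe_fn)]

// Safety: `p` must point to a Vec<i32> that outlives the returned reference.
unsafe fn causes<'a>(p: *const Vec<i32>) -> &'a Vec<i32> {
    unsafe { &*p }
}

fn print_rest(p: *const Vec<i32>) {
    // Bind the result of the unsafe call first; the loop below needs no unsafe.
    let chain = unsafe { causes(p) };
    for cause in chain.iter().skip(1) {
        println!(": {}", cause);
    }
}

fn main() {
    let data = vec![1, 2, 3];
    print_rest(&data);
}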
5 changes: 5 additions & 0 deletions src/lib.rs
@@ -215,6 +215,11 @@
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![cfg_attr(not(feature = "std"), no_std)]
#![deny(dead_code, unused_imports, unused_mut)]
#![cfg_attr(
not(anyhow_no_unsafe_op_in_unsafe_fn_lint),
deny(unsafe_op_in_unsafe_fn)
)]
#![cfg_attr(anyhow_no_unsafe_op_in_unsafe_fn_lint, allow(unused_unsafe))]
#![allow(
clippy::doc_markdown,
clippy::enum_glob_use,
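The two cfg_attr lines added to src/lib.rs work as a pair: on compilers that know the lint, unsafe_op_in_unsafe_fn is denied; on older compilers the newly added explicit unsafe { } blocks inside unsafe fns would themselves warn as redundant, so unused_unsafe is allowed there instead. A sketch of the same gate in a hypothetical downstream crate, assuming its build script emits my_crate_old_compiler on toolchains without the lint:

#![cfg_attr(not(my_crate_old_compiler), deny(unsafe_op_in_unsafe_fn))]
// Old compilers see the inner blocks as redundant, so silence that warning.
#![cfg_attr(my_crate_old_compiler, allow(unused_unsafe))]

// Safety: `p` must be valid for a read of u32.
pub unsafe fn read(p: *const u32) -> u32 {
    unsafe { *p }
}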
8 changes: 4 additions & 4 deletions src/ptr.rs
@@ -42,7 +42,7 @@ where
}

pub unsafe fn boxed(self) -> Box<T> {
Box::from_raw(self.ptr.as_ptr())
unsafe { Box::from_raw(self.ptr.as_ptr()) }
}

pub fn by_ref(&self) -> Ref<T> {
@@ -120,7 +120,7 @@
}

pub unsafe fn deref(self) -> &'a T {
&*self.ptr.as_ptr()
unsafe { &*self.ptr.as_ptr() }
}
}

@@ -179,13 +179,13 @@
}

pub unsafe fn deref_mut(self) -> &'a mut T {
&mut *self.ptr.as_ptr()
unsafe { &mut *self.ptr.as_ptr() }
}
}

impl<'a, T> Mut<'a, T> {
pub unsafe fn read(self) -> T {
self.ptr.as_ptr().read()
unsafe { self.ptr.as_ptr().read() }
}
}

