Detect simultaneous borrow and drop of UnsafeCell
Dropping an UnsafeCell or calling into_inner can be seen as mutable
access, which is not allowed to happen while another borrow exists.
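
For illustration (not part of this commit), a minimal sketch of the kind of misuse the change detects, using only APIs that appear in this diff (loom::model, UnsafeCell::new, get, into_inner):

use loom::cell::UnsafeCell;

fn main() {
    loom::model(|| {
        let cell = UnsafeCell::new(0usize);
        // An immutable borrow of the contents is still alive here...
        let _guard = cell.get();
        // ...so consuming the cell counts as a mutable access and loom
        // flags the violation at this call.
        let _value = cell.into_inner();
    });
}

As the comment added to tests/unsafe_cell.rs below explains, this particular variant cannot be a #[should_panic] test: the panic unwinds into UnsafeCell::drop while the guard is still alive, causing a second panic and an abort, so the committed test exercises drop(x) instead.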

The downside of this change is the added Drop implementation on
UnsafeCell. Because of drop check, this rejects some uses of UnsafeCell
that were previously allowed.
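
To make the drop-check restriction concrete, here is a hedged sketch (not from the commit) of code that the borrow checker accepted while UnsafeCell had no Drop impl, but rejects once this Drop impl exists, because data borrowed inside the cell must now strictly outlive the cell:

use loom::cell::UnsafeCell;

fn main() {
    let cell;                       // declared first, so dropped last
    let value = 0usize;
    cell = UnsafeCell::new(&value); // the cell borrows `value`
    // Previously this compiled: UnsafeCell had no Drop impl, so nothing
    // could observe the borrow when `cell` was dropped. With the Drop
    // impl, drop check requires `value` to strictly outlive `cell`, and
    // this declaration order is rejected at compile time.
}

Declaring `value` before `cell` keeps the code compiling, since `value` then outlives the cell.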

fixes tokio-rs#349
e00E committed Apr 24, 2024
1 parent a7033ee commit bb18517
Showing 2 changed files with 43 additions and 49 deletions.
22 changes: 21 additions & 1 deletion src/cell/unsafe_cell.rs
@@ -1,3 +1,5 @@
+use std::mem::ManuallyDrop;
+
use crate::rt;

/// A checked version of `std::cell::UnsafeCell`.
@@ -115,7 +117,18 @@ impl<T> UnsafeCell<T> {

    /// Unwraps the value.
    pub fn into_inner(self) -> T {
-        self.data.into_inner()
+        // Observe that we have mutable access.
+        self.get_mut();
+
+        // We would like to destructure self. This is not possible because of
+        // the Drop implementation. Work around it using unsafe. Note that we
+        // extract `self.state` even though we don't use it. This prevents it
+        // from leaking.
+        let self_ = ManuallyDrop::new(self);
+        let _state = unsafe { std::ptr::read(&self_.state) };
+        let data = unsafe { std::ptr::read(&self_.data) };
+
+        data.into_inner()
    }
}

@@ -214,6 +227,13 @@ impl<T> From<T> for UnsafeCell<T> {
    }
}

+impl<T: ?Sized> Drop for UnsafeCell<T> {
+    fn drop(&mut self) {
+        // Observe that we have mutable access.
+        self.get_mut();
+    }
+}
+
impl<T: ?Sized> ConstPtr<T> {
    /// Dereference the raw pointer.
    ///
70 changes: 22 additions & 48 deletions tests/unsafe_cell.rs
@@ -60,54 +60,6 @@ fn atomic_causality_success() {
    });
}

-#[test]
-#[should_panic]
-fn atomic_causality_fail() {
-    struct Chan {
-        data: UnsafeCell<usize>,
-        guard: AtomicUsize,
-    }
-
-    impl Chan {
-        fn set(&self) {
-            unsafe {
-                self.data.with_mut(|v| {
-                    *v += 123;
-                });
-            }
-
-            self.guard.store(1, Release);
-        }
-
-        fn get(&self) {
-            unsafe {
-                self.data.with(|v| {
-                    assert_eq!(*v, 123);
-                });
-            }
-        }
-    }
-
-    loom::model(|| {
-        let chan = Arc::new(Chan {
-            data: UnsafeCell::new(0),
-            guard: AtomicUsize::new(0),
-        });
-
-        let th = {
-            let chan = chan.clone();
-            thread::spawn(move || chan.set())
-        };
-
-        // Try getting before joining
-        chan.get();
-
-        th.join().unwrap();
-
-        chan.get();
-    });
-}
-
#[derive(Clone)]
struct Data(Arc<UnsafeCell<usize>>);

@@ -333,3 +285,25 @@ fn unsafe_cell_access_after_sync() {
        }
    });
}
+
+#[test]
+#[should_panic]
+fn unsafe_cell_access_during_drop() {
+    loom::model(|| {
+        let x = UnsafeCell::new(());
+        let _guard = x.get();
+        drop(x);
+    });
+}
+
+// Unfortunately we cannot repeat the previous test with `into_inner` instead of
+// Drop because it leads to a second panic when UnsafeCell::drop runs and guard
+// is still alive. The second panic leads to an abort.
+
+#[test]
+fn unsafe_cell_into_inner_ok() {
+    loom::model(|| {
+        let x = UnsafeCell::new(0usize);
+        x.into_inner();
+    });
+}
