Building your own Box-like types
Smart pointers are types that implement Deref and optionally Drop, allowing them to behave like references while providing additional functionality like automatic cleanup, reference counting, or custom allocation.
// The Deref trait (from std::ops), reproduced for reference: it lets `*ptr`
// and auto-deref method calls reach the pointed-to value.
pub trait Deref {
    // `?Sized` allows unsized targets such as `str` and `[T]`.
    type Target: ?Sized;
    fn deref(&self) -> &Self::Target;
}
// The Drop trait (from std::ops), reproduced for reference: called exactly
// once, automatically, when the value goes out of scope — never invoked
// directly by user code.
pub trait Drop {
    fn drop(&mut self);
}
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};
use std::marker::PhantomData;
/// Arena that allocates objects in bulk
/// using a simple bump-pointer strategy over one pre-allocated byte buffer.
pub struct Arena {
    storage: RefCell<Vec<u8>>, // backing byte buffer (fixed capacity, never grows)
    allocated: RefCell<usize>, // bump cursor: number of bytes handed out so far
}
/// Smart pointer to arena-allocated object
/// The `'arena` borrow ties the box to the Arena so it cannot outlive it.
pub struct ArenaBox<'arena, T> {
    ptr: *mut T,                      // points into the Arena's storage buffer
    _phantom: PhantomData<&'arena T>, // carries the arena lifetime, stores nothing
}
impl Arena {
    /// Create an arena backed by a zeroed buffer of `capacity` bytes.
    pub fn new(capacity: usize) -> Self {
        Arena {
            storage: RefCell::new(vec![0; capacity]),
            allocated: RefCell::new(0),
        }
    }

    /// Allocate `value` in the arena and return a smart pointer to it.
    ///
    /// # Panics
    /// Panics if the arena does not have enough space left.
    ///
    /// NOTE(review): destructors of allocated values never run — the arena
    /// only reclaims raw bytes, so types owning heap data (e.g. `String`)
    /// leak their contents.
    pub fn alloc<T>(&self, value: T) -> ArenaBox<T> {
        let size = std::mem::size_of::<T>();
        let align = std::mem::align_of::<T>();
        let mut allocated = self.allocated.borrow_mut();
        let mut storage = self.storage.borrow_mut();
        // BUG FIX: align the *absolute address*, not just the offset.
        // A `Vec<u8>` buffer is only guaranteed 1-byte alignment, so rounding
        // the offset alone could still yield a misaligned pointer, which is
        // UB in `ptr::write`. (`align` is always a power of two in Rust, so
        // the mask trick is valid.)
        let base = storage.as_mut_ptr() as usize;
        let aligned_addr = (base + *allocated + align - 1) & !(align - 1);
        let offset = aligned_addr - base;
        assert!(offset + size <= storage.len(), "Arena out of memory");
        // Get pointer to the properly aligned allocation slot.
        let ptr = unsafe { storage.as_mut_ptr().add(offset) as *mut T };
        // SAFETY: `ptr` is aligned, in bounds, and these bytes have not been
        // handed out to any earlier allocation.
        unsafe {
            std::ptr::write(ptr, value);
        }
        *allocated = offset + size;
        ArenaBox {
            ptr,
            _phantom: PhantomData,
        }
    }

    /// Rewind the bump cursor, making the whole buffer reusable.
    ///
    /// NOTE(review): this does not drop previously allocated values, and any
    /// `ArenaBox` still alive would alias allocations made afterwards —
    /// callers must ensure no boxes outlive a reset.
    pub fn reset(&self) {
        *self.allocated.borrow_mut() = 0;
    }
}
impl<'arena, T> Deref for ArenaBox<'arena, T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: `ptr` was initialized by `Arena::alloc`, and the `'arena`
        // lifetime keeps the backing buffer alive. NOTE(review): a call to
        // `Arena::reset` can still invalidate the pointee — confirm callers
        // never hold boxes across a reset.
        unsafe { &*self.ptr }
    }
}
impl<'arena, T> DerefMut for ArenaBox<'arena, T> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: same as Deref; exclusive access follows from &mut self,
        // because each ArenaBox owns a distinct slot in the buffer.
        unsafe { &mut *self.ptr }
    }
}
// Note: ArenaBox intentionally does NOT implement Drop.
// Caution: destructors of arena-allocated values never run at all — dropping
// or resetting the Arena only releases the raw byte buffer, so values that
// own heap data (e.g. String) leak their contents.
// Usage
// Demonstrates allocation, Deref/DerefMut access, and bulk reset.
fn arena_example() {
    let arena = Arena::new(1024);
    {
        let mut x = arena.alloc(42);
        let mut y = arena.alloc(String::from("hello"));
        *x += 10; // DerefMut: mutate through the smart pointer
        y.push_str(" world"); // auto-deref method call on the String
        println!("x: {}, y: {}", *x, *y);
    }
    // Objects still in arena, not dropped yet
    // NOTE(review): the String's destructor never runs — its heap buffer
    // leaks, since reset() only rewinds the bump cursor.
    arena.reset(); // Bulk deallocation
}
use std::ops::Deref;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::ptr::NonNull;
// Heap header shared by every MyArc/MyWeak handle pointing at one value.
struct MyArcInner<T> {
    strong: AtomicUsize, // count of MyArc handles; data is dropped when it hits 0
    weak: AtomicUsize,   // count of MyWeak handles (no implicit weak, unlike std::sync::Arc)
    data: T,
}
// Owning handle: keeps `data` alive. NonNull enables the niche optimization,
// so Option<MyArc<T>> is pointer-sized.
pub struct MyArc<T> {
    ptr: NonNull<MyArcInner<T>>,
}
// Non-owning handle: does not keep `data` alive.
// NOTE(review): no upgrade() or Drop impl for MyWeak is shown in this
// snippet, so weak handles here can neither access data nor free the box.
pub struct MyWeak<T> {
    ptr: NonNull<MyArcInner<T>>,
}
impl<T> MyArc<T> {
    /// Move `data` to the heap with strong = 1, weak = 0.
    pub fn new(data: T) -> Self {
        let inner = Box::new(MyArcInner {
            strong: AtomicUsize::new(1),
            weak: AtomicUsize::new(0),
            data,
        });
        MyArc {
            // SAFETY: Box::into_raw never returns null.
            ptr: unsafe { NonNull::new_unchecked(Box::into_raw(inner)) },
        }
    }
    /// Create a weak handle, bumping the weak count.
    pub fn downgrade(&self) -> MyWeak<T> {
        // SAFETY: a live MyArc guarantees the allocation is alive.
        let inner = unsafe { self.ptr.as_ref() };
        // Relaxed: creating a weak handle needs no ordering with `data`.
        inner.weak.fetch_add(1, Ordering::Relaxed);
        MyWeak { ptr: self.ptr }
    }
    /// Current strong count (a racy snapshot under concurrency).
    pub fn strong_count(&self) -> usize {
        // SAFETY: a live MyArc guarantees the allocation is alive.
        let inner = unsafe { self.ptr.as_ref() };
        inner.strong.load(Ordering::Relaxed)
    }
}
impl<T> Clone for MyArc<T> {
    fn clone(&self) -> Self {
        // SAFETY: a live MyArc guarantees the allocation is alive.
        let inner = unsafe { self.ptr.as_ref() };
        // Relaxed suffices for the increment: our existing handle already
        // keeps the data alive (same choice as std::sync::Arc::clone).
        inner.strong.fetch_add(1, Ordering::Relaxed);
        MyArc { ptr: self.ptr }
    }
}
impl<T> Deref for MyArc<T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: the allocation outlives every strong handle, including self,
        // so the shared reference is valid for the borrow of `self`.
        let inner = unsafe { self.ptr.as_ref() };
        &inner.data
    }
}
impl<T> Drop for MyArc<T> {
fn drop(&mut self) {
let inner = unsafe { self.ptr.as_ref() };
let old_strong = inner.strong.fetch_sub(1, Ordering::Release);
if old_strong == 1 {
// Last strong reference - drop data
std::sync::atomic::fence(Ordering::Acquire);
unsafe {
std::ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data);
}
// If no weak references, deallocate
if inner.weak.load(Ordering::Relaxed) == 0 {
unsafe {
drop(Box::from_raw(self.ptr.as_ptr()));
}
}
}
}
}
// SAFETY: MyArc hands out &T from any thread, so T must be Sync; dropping the
// last handle on another thread moves/destroys T there, so T must also be
// Send. Requiring both on each impl mirrors std::sync::Arc.
unsafe impl<T: Send + Sync> Send for MyArc<T> {}
unsafe impl<T: Send + Sync> Sync for MyArc<T> {}
use std::ops::{Deref, DerefMut};
/// Generic scope guard that runs cleanup on drop
/// (unless defused). Both fields are `Option`s so Drop/defuse can move them out.
pub struct ScopeGuard<T, F: FnOnce(&mut T)> {
    value: Option<T>,   // None only after defuse() has taken the value
    cleanup: Option<F>, // None once defused (or already run in Drop)
}
impl<T, F: FnOnce(&mut T)> ScopeGuard<T, F> {
    /// Wrap `value`, arming `cleanup` to run when the guard is dropped.
    pub fn new(value: T, cleanup: F) -> Self {
        ScopeGuard {
            cleanup: Some(cleanup),
            value: Some(value),
        }
    }

    /// Disarm the guard and hand the value back without running cleanup.
    pub fn defuse(mut self) -> T {
        // Clear the closure first so the Drop impl (which still runs on
        // `self`) sees no pending cleanup.
        self.cleanup = None;
        let inner = self.value.take();
        inner.unwrap()
    }
}
impl<T, F: FnOnce(&mut T)> Deref for ScopeGuard<T, F> {
    type Target = T;
    fn deref(&self) -> &T {
        // `value` is Some for the guard's entire usable lifetime; it only
        // becomes None inside defuse(), which consumes the guard.
        self.value.as_ref().unwrap()
    }
}
impl<T, F: FnOnce(&mut T)> DerefMut for ScopeGuard<T, F> {
    fn deref_mut(&mut self) -> &mut T {
        // Same invariant as Deref: Some until defuse() consumes the guard.
        self.value.as_mut().unwrap()
    }
}
impl<T, F: FnOnce(&mut T)> Drop for ScopeGuard<T, F> {
    fn drop(&mut self) {
        // take() both disarms the guard and moves the FnOnce out so it can be
        // called. After defuse(), cleanup is None and this is a no-op.
        if let Some(cleanup) = self.cleanup.take() {
            cleanup(self.value.as_mut().unwrap());
        }
    }
}
// Usage examples
/// Three ScopeGuard usage patterns: cleanup, logging, and defusable rollback.
fn scopeguard_examples() {
    // Example 1: File cleanup
    {
        let file = std::fs::File::create("temp.txt").unwrap();
        let guard = ScopeGuard::new(file, |_| {
            std::fs::remove_file("temp.txt").ok();
        });
        // Use file...
    } // Cleanup runs here
    // Example 2: Mutex lock with logging
    {
        let mutex = std::sync::Mutex::new(vec![1, 2, 3]);
        // BUG FIX: the binding must be `mut` — push() reaches the Vec
        // through DerefMut, which needs `&mut self`.
        let mut guard = ScopeGuard::new(mutex.lock().unwrap(), |data| {
            println!("Releasing lock with {} items", data.len());
        });
        guard.push(4);
    } // Logs on unlock
    // Example 3: Transaction
    {
        // (no `mut` needed: the value is moved straight into the guard)
        let transaction = begin_transaction();
        // BUG FIX: `mut` binding required so commit() can take `&mut guard`.
        let mut guard = ScopeGuard::new(transaction, |tx| {
            println!("Auto-rollback transaction");
            rollback(tx);
        });
        // If commit succeeds, defuse guard
        commit(&mut guard);
        guard.defuse(); // Don't rollback
    }
}
/// Stub transaction helpers so the ScopeGuard example compiles; a `u32`
/// stands in for a real transaction handle.
fn begin_transaction() -> u32 {
    0
}

fn commit(_tx: &mut u32) {}

fn rollback(_tx: &mut u32) {}
use std::ops::Deref;
/// UserId - can't accidentally use as OrderId
/// (newtype pattern: a zero-cost wrapper giving a plain u64 a distinct type).
pub struct UserId(u64);
/// OrderId - distinct type
pub struct OrderId(u64);

impl OrderId {
    /// Wrap a raw u64 as a type-safe order id.
    /// (Added so usage like `OrderId::new(456)` compiles, mirroring
    /// `UserId::new` — the constructor was missing in the original.)
    pub fn new(id: u64) -> Self {
        OrderId(id)
    }
}
impl Deref for UserId {
    type Target = u64;
    // NOTE(review): Deref on a plain newtype (not a smart pointer) is widely
    // considered an anti-pattern — it re-opens the implicit u64 conversions
    // the newtype was meant to prevent. Kept here for illustration.
    fn deref(&self) -> &u64 {
        &self.0
    }
}
impl UserId {
pub fn new(id: u64) -> Self {
UserId(id)
}
}
// Usage
// Accepts only UserId — passing an OrderId is a compile error.
fn fetch_user(id: UserId) {
    // `*id` works only because UserId implements Deref<Target = u64>.
    println!("Fetching user {}", *id);
}
// Accepts only OrderId — passing a UserId is a compile error.
fn fetch_order(id: OrderId) {
    // BUG FIX: `OrderId` has no Deref impl in this snippet, so `*id` did not
    // compile; read the wrapped field directly instead.
    println!("Fetching order {}", id.0);
}
// Type safety!
let user_id = UserId::new(123);
let order_id = OrderId::new(456);
fetch_user(user_id);
// fetch_user(order_id); // ERROR: Type mismatch!
use std::ops::{Deref, DerefMut};
use std::mem::ManuallyDrop;
/// SmallBox: stores small values inline, large values on heap
/// (`ManuallyDrop` lets the Drop impl below control exactly when the inline
/// value is destroyed).
/// NOTE(review): the enum itself is still at least as large as T plus a tag,
/// so this avoids a heap allocation, not memory — confirm that is the intent.
pub enum SmallBox<T> {
    Inline(ManuallyDrop<T>),
    Heap(Box<T>),
}
impl<T> SmallBox<T> {
    /// Store `value` inline when it fits in 16 bytes, otherwise box it.
    pub fn new(value: T) -> Self {
        const INLINE_LIMIT: usize = 16;
        if std::mem::size_of::<T>() > INLINE_LIMIT {
            SmallBox::Heap(Box::new(value))
        } else {
            SmallBox::Inline(ManuallyDrop::new(value))
        }
    }
}
impl<T> Deref for SmallBox<T> {
    type Target = T;
    fn deref(&self) -> &T {
        match self {
            // ManuallyDrop<T> itself derefs to T, so `value` coerces to &T.
            SmallBox::Inline(value) => value,
            // Box<T> derefs to T the same way.
            SmallBox::Heap(boxed) => boxed,
        }
    }
}
impl<T> DerefMut for SmallBox<T> {
    fn deref_mut(&mut self) -> &mut T {
        match self {
            // Both ManuallyDrop<T> and Box<T> coerce to &mut T here.
            SmallBox::Inline(value) => value,
            SmallBox::Heap(boxed) => boxed,
        }
    }
}
impl<T> Drop for SmallBox<T> {
    fn drop(&mut self) {
        match self {
            SmallBox::Inline(value) => unsafe {
                // SAFETY: drop runs at most once, and the automatic field
                // drop of ManuallyDrop afterwards is a no-op, so the inline
                // T is destroyed exactly once.
                ManuallyDrop::drop(value);
            },
            SmallBox::Heap(_) => {
                // Box handles drop
            }
        }
    }
}
use std::sync::{Arc, Mutex};
use std::ops::Deref;
use std::collections::VecDeque;
/// Object pool for expensive-to-create objects
pub struct Pool<T> {
    available: Arc<Mutex<VecDeque<T>>>,        // idle objects awaiting reuse
    factory: Arc<dyn Fn() -> T + Send + Sync>, // builds new objects when the pool is empty
}
/// Pooled object - returns to pool on drop
pub struct Pooled<T> {
    value: Option<T>,              // Some for the object's lifetime; Drop takes it back
    pool: Arc<Mutex<VecDeque<T>>>, // shared queue this object is returned to
}
impl<T> Pool<T> {
    /// Build a pool pre-populated with `size` objects from `factory`.
    pub fn new<F>(size: usize, factory: F) -> Self
    where
        F: Fn() -> T + Send + Sync + 'static,
    {
        // Pre-warm the queue with `size` freshly built objects.
        let preallocated: VecDeque<T> = (0..size).map(|_| factory()).collect();
        Pool {
            available: Arc::new(Mutex::new(preallocated)),
            factory: Arc::new(factory),
        }
    }

    /// Take an object from the pool, building a fresh one if it is empty.
    pub fn acquire(&self) -> Pooled<T> {
        let mut idle = self.available.lock().unwrap();
        let value = match idle.pop_front() {
            Some(existing) => existing,
            // Pool exhausted: fall back to the factory (lock still held,
            // matching the original's temporary-guard lifetime).
            None => (self.factory)(),
        };
        drop(idle);
        Pooled {
            value: Some(value),
            pool: Arc::clone(&self.available),
        }
    }
}
impl<T> Deref for Pooled<T> {
    type Target = T;
    fn deref(&self) -> &T {
        // `value` is Some for the object's entire lifetime; Drop is the only
        // place that takes it out.
        self.value.as_ref().unwrap()
    }
}
impl<T> Drop for Pooled<T> {
    /// Return the object to the shared pool.
    fn drop(&mut self) {
        if let Some(value) = self.value.take() {
            // ROBUSTNESS FIX: avoid `unwrap()` inside Drop — if the mutex is
            // poisoned while we are already unwinding, panicking here would
            // abort the process. A poisoned pool simply discards the object.
            if let Ok(mut queue) = self.pool.lock() {
                queue.push_back(value);
            }
        }
    }
}
// Usage
// Demonstrates acquiring, using, and automatically returning pooled objects.
fn pool_example() {
    let pool = Pool::new(3, || vec![0; 1024]);
    {
        let first = pool.acquire();
        let second = pool.acquire();
        println!("Obj1 len: {}", first.len());
        println!("Obj2 len: {}", second.len());
    } // Both returned to pool
    // Can reuse
    let _reused = pool.acquire();
}
// Template: minimal Deref for a custom smart pointer. `MySmartPointer` is a
// placeholder type and the body is left to the implementor, so this fragment
// does not compile as-is.
impl<T> Deref for MySmartPointer<T> {
    type Target = T;
    fn deref(&self) -> &T {
        // Return reference to contained value
    }
}
// Enables: smart_ptr.method() instead of (*smart_ptr).method()
// Template: Drop hook for a custom smart pointer (placeholder body).
impl<T> Drop for MySmartPointer<T> {
    fn drop(&mut self) {
        // Cleanup: deallocate, decrement counter, unlock, etc.
    }
}
// Opt in to cross-thread use. These are unsafe impls: the compiler cannot
// check them, so the author vouches for thread-safety of the raw internals.
unsafe impl<T: Send> Send for MySmartPointer<T> {}
unsafe impl<T: Sync> Sync for MySmartPointer<T> {}
// Only if your implementation is actually thread-safe!
// BAD: Deref should return the "contained" type
struct Wrapper(String);
impl Deref for Wrapper {
    type Target = i32; // ❌ Unrelated type!
    fn deref(&self) -> &i32 {
        // No i32 exists inside Wrapper — there is nothing sensible to return.
        todo!()
    }
}
// GOOD: Deref to contained type
impl Deref for Wrapper {
    type Target = str; // ✅ Related type
    fn deref(&self) -> &str {
        // &String coerces to &str here (String: Deref<Target = str>).
        &self.0
    }
}
// BAD: Not a pointer type
struct Config {
    value: String,
}
impl Deref for Config {
    type Target = str;
    fn deref(&self) -> &str {
        // This compiles fine — the problem is semantic: Config is plain
        // data, so implicit config-as-str conversions obscure call sites.
        &self.value
    }
}
// Deref is meant for smart pointers, not all wrapper types
// BAD: Leaks memory
// (Pedagogical fragment: `MyBox` and `layout` are assumed to be defined
// elsewhere, so this does not compile standalone.)
impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // Forgot to drop T!
        // The raw bytes are freed, but T's destructor (e.g. a String's
        // heap buffer) never runs.
        unsafe {
            std::alloc::dealloc(self.ptr as *mut u8, layout);
        }
    }
}
// GOOD: Drop T first
// (Same pedagogical fragment; `layout` assumed defined elsewhere.)
impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // SAFETY: ptr is valid and T has not been dropped yet; after
        // drop_in_place the memory is released exactly once.
        unsafe {
            std::ptr::drop_in_place(self.ptr); // Drop T
            std::alloc::dealloc(self.ptr as *mut u8, layout);
        }
    }
}
Exercises:
- Build a Box that counts total allocations/deallocations.
- Create a smart pointer that initializes on first access (lazy initialization smart pointer pattern).
- Build a pointer that logs all accesses for debugging.
- Write a custom arena allocator for fast bulk allocations.
- Explore self-referential structs using custom smart pointers.

Solutions are available on GitHub; you can also run this code in the official Rust Playground.