|
| 1 | +#ifndef REF_BLOCK_BASE_H |
| 2 | +#define REF_BLOCK_BASE_H |
| 3 | + |
#include <atomic>
#include <cstddef>
#include <functional>
#include <memory>
#include <new>

#include "cpp_utility.h"
| 10 | + |
// Type-erased control block for a hand-rolled shared/weak smart pointer.
// Owns the two atomic reference counters; derived classes supply how the
// managed resource is destroyed and how the block itself is freed.
//
// Thread-safety: every counter operation below is a single atomic RMW and
// is safe to call concurrently from multiple threads.
struct RefBlockBase
{
    // atomic operation
    // Both counters start at 1: constructing the control block creates the
    // first shared owner, and the set of shared owners collectively holds
    // one "weak reference" that keeps the block alive (released by the
    // thread that drops the shared count to zero — see decrement_shared).
    std::atomic<size_t> m_shared_count{1}; // when init, create the shared_ptr and own it
    std::atomic<size_t> m_weak_count{1};   // when init, m_shared_count as one "weak reference"

    virtual ~RefBlockBase() = default;

    // type erasure
    // Destroy the managed resource (invoked by the last shared owner).
    virtual void
    dispose_resource() = 0;
    // Free the control block itself (invoked by the last weak reference).
    virtual void
    destroy_self() = 0;
    // Pointer to the managed object for blocks that embed it (make-shared
    // style); the default returns nullptr for blocks that only hold a pointer.
    virtual void*
    get_resource_ptr()
    {
        return nullptr;
    }

    // ---- thread safe counter operation ----

    // Add one shared owner.  relaxed suffices: the caller already holds a
    // reference, so the count cannot concurrently reach zero, and no other
    // memory needs to be ordered by this increment.
    void
    increment_shared() noexcept
    {
        m_shared_count.fetch_add(1, std::memory_order_relaxed);
    }

    // Add one weak reference (same relaxed reasoning as increment_shared).
    void
    increment_weak() noexcept
    {
        m_weak_count.fetch_add(1, std::memory_order_relaxed);
    }

    // Drop one shared owner; exactly one thread observes the 1 -> 0
    // transition and performs the destruction.
    void
    decrement_shared() noexcept
    {
        // fetch_sub return the value before minus
        // acq_rel: the release half makes this owner's writes to the
        // resource happen-before its destruction; the acquire half makes
        // the destroying thread see every other owner's released writes.
        if (m_shared_count.fetch_sub(1, std::memory_order_acq_rel) == 1) {
            dispose_resource();
            decrement_weak();
        }
    }

    // Drop one weak reference; exactly one thread observes the 1 -> 0
    // transition and frees the control block.
    void
    decrement_weak() noexcept
    {
        if (m_weak_count.fetch_sub(1, std::memory_order_acq_rel) == 1) {
            // weak counter goes to 0, destroy control block
            destroy_self();
        }
    }

    // Conditionally add a shared owner only while at least one still exists —
    // the classic weak_ptr::lock() primitive.  Returns false once the shared
    // count has reached zero (resource destroyed or being destroyed), so a
    // dead resource can never be revived.
    bool
    try_increment_shared() noexcept
    {
        size_t count = m_shared_count.load(std::memory_order_relaxed);

        while (count != 0) {
            // try to replace count with count + 1; on failure `count` is
            // reloaded with the current value and the loop re-checks it
            // (compare_exchange_weak may also fail spuriously, which simply
            // retries).
            if (m_shared_count.compare_exchange_weak(count, count + 1, std::memory_order_acq_rel)) {
                return true; // success
            }
        }
        return false;
    }
};
| 77 | + |
| 78 | +// for 'new' |
| 79 | +// Y is the actual type, D is the del type |
| 80 | +template <typename Y, typename D> struct RefBlockImpl : public RefBlockBase |
| 81 | +{ |
| 82 | + Y* m_resource; |
| 83 | + D m_deleter; |
| 84 | + |
| 85 | + RefBlockImpl(Y* res, D del) : m_resource(res), m_deleter(utility::move(del)) {} |
| 86 | + |
| 87 | + void |
| 88 | + dispose_resource() override |
| 89 | + { |
| 90 | + // call the deleter |
| 91 | + m_deleter(m_resource); |
| 92 | + } |
| 93 | + |
| 94 | + void |
| 95 | + destroy_self() override |
| 96 | + { |
| 97 | + // destroy self |
| 98 | + delete this; |
| 99 | + } |
| 100 | +}; |
| 101 | + |
| 102 | +template <typename T> struct RefBlockMakeShared : public RefBlockBase |
| 103 | +{ |
| 104 | + // T's data will followed directly after this struct |
| 105 | + // use an aligned char array for padding |
| 106 | + alignas(T) char m_storage[sizeof(T)]; |
| 107 | + |
| 108 | + void* |
| 109 | + get_resource_ptr() override |
| 110 | + { |
| 111 | + return reinterpret_cast<T*>(m_storage); |
| 112 | + } |
| 113 | + |
| 114 | + void |
| 115 | + dispose_resource() override |
| 116 | + { |
| 117 | + // call the deconstruct but not release the memory |
| 118 | + reinterpret_cast<T*>(m_storage)->~T(); |
| 119 | + } |
| 120 | + |
| 121 | + void |
| 122 | + destroy_self() override |
| 123 | + { |
| 124 | + delete this; |
| 125 | + } |
| 126 | +}; |
| 127 | + |
| 128 | +#endif |
0 commit comments