refcount.h
// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#pragma once

#include "memory.h"

#if _MSC_VER
#if _MSC_VER < 1910
#include <intrin.h>
#else
#include <intrin0.h>
#endif
#endif

KJ_BEGIN_HEADER

namespace kj {

// =======================================================================================
// Non-atomic (thread-unsafe) refcounting

class Refcounted: private Disposer {
  // Subclass this to create a class that contains a reference count. Then, use
  // `kj::refcounted<T>()` to allocate a new refcounted pointer.
  //
  // Do NOT use this lightly. Refcounting is a crutch. Good designs should strive to make object
  // ownership clear, so that refcounting is not necessary. All that said, reference counting can
  // sometimes simplify code that would otherwise become convoluted with explicit ownership, even
  // when ownership relationships are clear at an abstract level.
  //
  // NOT THREADSAFE: This refcounting implementation assumes that an object's references are
  // manipulated only in one thread, because atomic (thread-safe) refcounting is surprisingly slow.
  //
  // In general, abstract classes should _not_ subclass this. The concrete class at the bottom
  // of the hierarchy should be the one to decide how it implements refcounting. Interfaces should
  // expose only an `addRef()` method that returns `Own<InterfaceType>`. There are two reasons for
  // this rule:
  // 1. Interfaces would need to virtually inherit Refcounted, otherwise two refcounted interfaces
  //    could not be inherited by the same subclass. Virtual inheritance is awkward and
  //    inefficient.
  // 2. An implementation may decide that it would rather return a copy than a refcount, or use
  //    some other strategy.
  //
  // TODO(cleanup): Rethink above. Virtual inheritance is not necessarily that bad. OTOH, a
  //   virtual function call for every refcount is sad in its own way. A Ref<T> type to replace
  //   Own<T> could also be nice.

public:
  Refcounted() = default;
  virtual ~Refcounted() noexcept(false);
  KJ_DISALLOW_COPY(Refcounted);

  inline bool isShared() const { return refcount > 1; }
  // Check if there are multiple references to this object. This is sometimes useful for deciding
  // whether it's safe to modify the object vs. make a copy.

private:
  mutable uint refcount = 0;
  // "mutable" because disposeImpl() is const. Bleh.

  void disposeImpl(void* pointer) const override;
  template <typename T>
  static Own<T> addRefInternal(T* object);

  template <typename T>
  friend Own<T> addRef(T& object);
  template <typename T, typename... Params>
  friend Own<T> refcounted(Params&&... params);
};

template <typename T, typename... Params>
inline Own<T> refcounted(Params&&... params) {
  // Allocate a new refcounted instance of T, passing `params` to its constructor. Returns an
  // initial reference to the object. More references can be created with `kj::addRef()`.

  return Refcounted::addRefInternal(new T(kj::fwd<Params>(params)...));
}

template <typename T>
Own<T> addRef(T& object) {
  // Return a new reference to `object`, which must subclass Refcounted and have been allocated
  // using `kj::refcounted<>()`. It is suggested that subclasses implement a non-static addRef()
  // method which wraps this and returns the appropriate type.

  KJ_IREQUIRE(object.Refcounted::refcount > 0, "Object not allocated with kj::refcounted().");
  return Refcounted::addRefInternal(&object);
}

template <typename T>
Own<T> Refcounted::addRefInternal(T* object) {
  Refcounted* refcounted = object;
  ++refcounted->refcount;
  return Own<T>(object, *refcounted);
}
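// Example usage (an illustrative sketch, not part of the original header; `Widget` is a
// hypothetical name): a concrete class subclasses Refcounted and exposes a member addRef()
// wrapping kj::addRef(), as suggested above.
//
//     class Widget: public kj::Refcounted {
//     public:
//       kj::Own<Widget> addRef() { return kj::addRef(*this); }
//     };
//
//     kj::Own<Widget> a = kj::refcounted<Widget>();  // refcount == 1
//     kj::Own<Widget> b = a->addRef();               // refcount == 2; a->isShared() is true
//
// Dropping both Owns (in either order, but on the same thread) destroys the Widget.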
// =======================================================================================
// Atomic (thread-safe) refcounting
//
// Warning: Atomic ops are SLOW.

#if _MSC_VER && !defined(__clang__)
#if _M_ARM
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP##_##MEM
#else
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP
#endif
#endif

class AtomicRefcounted: private kj::Disposer {
public:
  AtomicRefcounted() = default;
  virtual ~AtomicRefcounted() noexcept(false);
  KJ_DISALLOW_COPY(AtomicRefcounted);

  inline bool isShared() const {
#if _MSC_VER && !defined(__clang__)
    return KJ_MSVC_INTERLOCKED(Or, acq)(&refcount, 0) > 1;
#else
    return __atomic_load_n(&refcount, __ATOMIC_ACQUIRE) > 1;
#endif
  }

private:
#if _MSC_VER && !defined(__clang__)
  mutable volatile long refcount = 0;
#else
  mutable volatile uint refcount = 0;
#endif

  bool addRefWeakInternal() const;

  void disposeImpl(void* pointer) const override;
  template <typename T>
  static kj::Own<T> addRefInternal(T* object);
  template <typename T>
  static kj::Own<const T> addRefInternal(const T* object);

  template <typename T>
  friend kj::Own<T> atomicAddRef(T& object);
  template <typename T>
  friend kj::Own<const T> atomicAddRef(const T& object);
  template <typename T>
  friend kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object);
  template <typename T, typename... Params>
  friend kj::Own<T> atomicRefcounted(Params&&... params);
};

template <typename T, typename... Params>
inline kj::Own<T> atomicRefcounted(Params&&... params) {
  return AtomicRefcounted::addRefInternal(new T(kj::fwd<Params>(params)...));
}

template <typename T>
kj::Own<T> atomicAddRef(T& object) {
  KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0,
      "Object not allocated with kj::atomicRefcounted().");
  return AtomicRefcounted::addRefInternal(&object);
}

template <typename T>
kj::Own<const T> atomicAddRef(const T& object) {
  KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0,
      "Object not allocated with kj::atomicRefcounted().");
  return AtomicRefcounted::addRefInternal(&object);
}

template <typename T>
kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object) {
  // Try to addref an object whose refcount could have already reached zero in another thread, and
  // whose destructor could therefore already have started executing. The destructor must contain
  // some synchronization that guarantees that said destructor has not yet completed when
  // atomicAddRefWeak() is called (so that the object is still valid). Since the destructor cannot
  // be canceled once it has started, in the case that it has already started, this function
  // returns nullptr.

  const AtomicRefcounted* refcounted = &object;
  if (refcounted->addRefWeakInternal()) {
    return kj::Own<const T>(&object, *refcounted);
  } else {
    return nullptr;
  }
}
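// Example usage (an illustrative sketch, not part of the original header; `Entry` and
// `lookupEntry()` are hypothetical names): a concrete class subclasses AtomicRefcounted so that
// references may be created and dropped from multiple threads.
//
//     class Entry: public kj::AtomicRefcounted {
//     public:
//       kj::Own<const Entry> addRef() const { return kj::atomicAddRef(*this); }
//     };
//
//     kj::Own<Entry> entry = kj::atomicRefcounted<Entry>();
//     kj::Own<const Entry> ref = entry->addRef();  // safe to hand off to another thread
//
// The synchronization requirement of atomicAddRefWeak() (see above) is typically met by having
// ~Entry() remove the object from some shared registry under the same lock that readers hold
// while looking up the raw pointer and trying to upgrade it:
//
//     const Entry* raw = lookupEntry();  // performed while holding the registry's lock
//     kj::Maybe<kj::Own<const Entry>> weak = kj::atomicAddRefWeak(*raw);
//     // null if ~Entry() has already begun; otherwise a strong reference keeping `raw` alive.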
template <typename T>
kj::Own<T> AtomicRefcounted::addRefInternal(T* object) {
  AtomicRefcounted* refcounted = object;
#if _MSC_VER && !defined(__clang__)
  KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);
#else
  __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
#endif
  return kj::Own<T>(object, *refcounted);
}

template <typename T>
kj::Own<const T> AtomicRefcounted::addRefInternal(const T* object) {
  const AtomicRefcounted* refcounted = object;
#if _MSC_VER && !defined(__clang__)
  KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);
#else
  __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
#endif
  return kj::Own<const T>(object, *refcounted);
}

}  // namespace kj

KJ_END_HEADER