/root/doris/be/src/gutil/atomic_refcount.h
Line | Count | Source (jump to first uncovered line) |
1 | | // Copyright 2008 Google Inc. |
2 | | // All rights reserved. |
3 | | |
4 | | // Atomic increment and decrement for reference counting. |
5 | | // For atomic operations on statistics counters and sequence numbers, |
6 | | // see atomic_stats_counter.h and atomic_sequence_num.h respectively. |
7 | | |
8 | | // Some clients use atomic operations for reference counting. It is |
9 | | // simpler and less error-prone if you use one of them: |
10 | | // util/refcount/reference_counted.h |
11 | | // util/gtl/refcounted_ptr.h |
12 | | // util/gtl/shared_ptr.h |
13 | | // Alternatively, use a Mutex to maintain your reference count. |
14 | | // If you really must build your own reference counts with atomic operations, |
15 | | // use the following routines in the way suggested by this example: |
16 | | // AtomicWord ref_count_; // remember to initialize this to 0 |
17 | | // ... |
18 | | // void Ref() { |
19 | | // base::RefCountInc(&this->ref_count_); |
20 | | // } |
21 | | // void Unref() { |
22 | | // if (!base::RefCountDec(&this->ref_count_)) { |
23 | | // delete this; |
24 | | // } |
25 | | // } |
26 | | // Using these routines (rather than the ones in atomicops.h) will provide the |
27 | | // correct semantics; in particular, the memory ordering needed to make |
28 | | // reference counting work will be guaranteed. |
29 | | // You need not declare the reference count word "volatile". After |
30 | | // initialization you should use the word only via the routines below; the |
31 | | // "volatile" in the signatures below is for backwards compatibility. |
32 | | // |
33 | | // If you need to do something very different from this, use a Mutex. |
34 | | |
35 | | #pragma once |
36 | | |
37 | | #include <glog/logging.h> |
38 | | |
39 | | #include "gutil/atomicops.h" |
40 | | |
41 | | namespace base { |
42 | | |
43 | | // These calls are available for both Atomic32, and AtomicWord types, |
44 | | // and also for base::subtle::Atomic64 if available on the platform. |
45 | | |
46 | | // Normally, clients are expected to use RefCountInc/RefCountDec. |
47 | | // In rare cases, it may be necessary to adjust the reference count by |
48 | | // more than 1, in which case they may use RefCountIncN/RefCountDecN. |
49 | | |
50 | | // Increment a reference count by "increment", which must exceed 0. |
51 | 6.60k | inline void RefCountIncN(volatile Atomic32* ptr, Atomic32 increment) { |
52 | 6.60k | DCHECK_GT(increment, 0); |
53 | 6.60k | base::subtle::NoBarrier_AtomicIncrement(ptr, increment); |
54 | 6.60k | } |
55 | | |
56 | | // Decrement a reference count by "decrement", which must exceed 0, |
57 | | // and return whether the result is non-zero. |
58 | | // Insert barriers to ensure that state written before the reference count |
59 | | // became zero will be visible to a thread that has just made the count zero. |
60 | 6.56k | inline bool RefCountDecN(volatile Atomic32* ptr, Atomic32 decrement) { |
61 | 6.56k | DCHECK_GT(decrement, 0); |
62 | 6.56k | bool res = base::subtle::Barrier_AtomicIncrement(ptr, -decrement) != 0; |
63 | 6.56k | return res; |
64 | 6.56k | } |
65 | | |
66 | | // Increment a reference count by 1. |
67 | 6.60k | inline void RefCountInc(volatile Atomic32* ptr) { |
68 | 6.60k | base::RefCountIncN(ptr, 1); |
69 | 6.60k | } |
70 | | |
71 | | // Decrement a reference count by 1 and return whether the result is non-zero. |
72 | | // Insert barriers to ensure that state written before the reference count |
73 | | // became zero will be visible to a thread that has just made the count zero. |
74 | 6.56k | inline bool RefCountDec(volatile Atomic32* ptr) { |
75 | 6.56k | return base::RefCountDecN(ptr, 1); |
76 | 6.56k | } |
77 | | |
78 | | // Return whether the reference count is one. |
79 | | // If the reference count is used in the conventional way, a |
80 | | // reference count of 1 implies that the current thread owns the |
81 | | // reference and no other thread shares it. |
82 | | // This call performs the test for a reference count of one, and |
83 | | // performs the memory barrier needed for the owning thread |
84 | | // to act on the object, knowing that it has exclusive access to the |
85 | | // object. |
86 | 0 | inline bool RefCountIsOne(const volatile Atomic32* ptr) { |
87 | 0 | return base::subtle::Acquire_Load(ptr) == 1; |
88 | 0 | } |
89 | | |
90 | | // Return whether the reference count is zero. With conventional object |
91 | | // referencing counting, the object will be destroyed, so the reference count |
92 | | // should never be zero. Hence this is generally used for a debug check. |
93 | 6.56k | inline bool RefCountIsZero(const volatile Atomic32* ptr) { |
94 | 6.56k | return subtle::Acquire_Load(ptr) == 0; |
95 | 6.56k | } |
96 | | |
97 | | #if BASE_HAS_ATOMIC64 |
98 | | // Implementations for Atomic64, if available. |
99 | 0 | inline void RefCountIncN(volatile base::subtle::Atomic64* ptr, base::subtle::Atomic64 increment) { |
100 | 0 | DCHECK_GT(increment, 0); |
101 | 0 | base::subtle::NoBarrier_AtomicIncrement(ptr, increment); |
102 | 0 | } |
103 | 0 | inline bool RefCountDecN(volatile base::subtle::Atomic64* ptr, base::subtle::Atomic64 decrement) { |
104 | 0 | DCHECK_GT(decrement, 0); |
105 | 0 | return base::subtle::Barrier_AtomicIncrement(ptr, -decrement) != 0; |
106 | 0 | } |
107 | 0 | inline void RefCountInc(volatile base::subtle::Atomic64* ptr) { |
108 | 0 | base::RefCountIncN(ptr, 1); |
109 | 0 | } |
110 | 0 | inline bool RefCountDec(volatile base::subtle::Atomic64* ptr) { |
111 | 0 | return base::RefCountDecN(ptr, 1); |
112 | 0 | } |
113 | 0 | inline bool RefCountIsOne(const volatile base::subtle::Atomic64* ptr) { |
114 | 0 | return base::subtle::Acquire_Load(ptr) == 1; |
115 | 0 | } |
116 | 0 | inline bool RefCountIsZero(const volatile base::subtle::Atomic64* ptr) { |
117 | 0 | return base::subtle::Acquire_Load(ptr) == 0; |
118 | 0 | } |
119 | | #endif |
120 | | |
121 | | #ifdef AtomicWordCastType |
122 | | // Implementations for AtomicWord, if it's a different type from the above. |
123 | | inline void RefCountIncN(volatile AtomicWord* ptr, AtomicWord increment) { |
124 | | base::RefCountIncN(reinterpret_cast<volatile AtomicWordCastType*>(ptr), increment); |
125 | | } |
126 | | inline bool RefCountDecN(volatile AtomicWord* ptr, AtomicWord decrement) { |
127 | | return base::RefCountDecN(reinterpret_cast<volatile AtomicWordCastType*>(ptr), decrement); |
128 | | } |
129 | | inline void RefCountInc(volatile AtomicWord* ptr) { |
130 | | base::RefCountIncN(ptr, 1); |
131 | | } |
132 | | inline bool RefCountDec(volatile AtomicWord* ptr) { |
133 | | return base::RefCountDecN(ptr, 1); |
134 | | } |
135 | | inline bool RefCountIsOne(const volatile AtomicWord* ptr) { |
136 | | return base::subtle::Acquire_Load(reinterpret_cast<const volatile AtomicWordCastType*>(ptr)) == |
137 | | 1; |
138 | | } |
139 | | inline bool RefCountIsZero(const volatile AtomicWord* ptr) { |
140 | | return base::subtle::Acquire_Load(reinterpret_cast<const volatile AtomicWordCastType*>(ptr)) == |
141 | | 0; |
142 | | } |
143 | | #endif |
144 | | |
145 | | } // namespace base |