@@ -47,271 +47,6 @@ static inline int comparePointers(const T *left, const T *right) {
   return (left == right ? 0 : std::less<const T *>()(left, right) ? -1 : 1);
 }
 
-template <class EntryTy, bool ProvideDestructor, class Allocator>
-class ConcurrentMapBase;
-
-/// The partial specialization of ConcurrentMapBase whose destructor is
-/// trivial. The other implementation inherits from this, so this is a
-/// base for all ConcurrentMaps.
-template <class EntryTy, class Allocator>
-class ConcurrentMapBase<EntryTy, false, Allocator> : protected Allocator {
-protected:
-  struct Node {
-    std::atomic<Node*> Left;
-    std::atomic<Node*> Right;
-    EntryTy Payload;
-
-    template <class... Args>
-    Node(Args &&... args)
-      : Left(nullptr), Right(nullptr), Payload(std::forward<Args>(args)...) {}
-
-    Node(const Node &) = delete;
-    Node &operator=(const Node &) = delete;
-
-#ifndef NDEBUG
-    void dump() const {
-      auto L = Left.load(std::memory_order_acquire);
-      auto R = Right.load(std::memory_order_acquire);
-      printf("\"%p\" [ label = \" {<f0> %08lx | {<f1> | <f2>}}\" "
-             "style=\"rounded\" shape=\"record\"];\n",
-             this, (long) Payload.getKeyValueForDump());
-
-      if (L) {
-        L->dump();
-        printf("\"%p\":f1 -> \"%p\":f0;\n", this, L);
-      }
-      if (R) {
-        R->dump();
-        printf("\"%p\":f2 -> \"%p\":f0;\n", this, R);
-      }
-    }
-#endif
-  };
-
-  std::atomic<Node*> Root;
-
-  constexpr ConcurrentMapBase() : Root(nullptr) {}
-
-  // Implicitly trivial destructor.
-  ~ConcurrentMapBase() = default;
-
-  void destroyNode(Node *node) {
-    assert(node && "destroying null node");
-    auto allocSize = sizeof(Node) + node->Payload.getExtraAllocationSize();
-
-    // Destroy the node's payload.
-    node->~Node();
-
-    // Deallocate the node. The static_cast here is required
-    // because LLVM's allocator API is insane.
-    this->Deallocate(static_cast<void *>(node), allocSize, alignof(Node));
-  }
-};
-
-/// The partial specialization of ConcurrentMapBase which provides a
-/// non-trivial destructor.
-template <class EntryTy, class Allocator>
-class ConcurrentMapBase<EntryTy, true, Allocator>
-    : protected ConcurrentMapBase<EntryTy, false, Allocator> {
-protected:
-  using super = ConcurrentMapBase<EntryTy, false, Allocator>;
-  using Node = typename super::Node;
-
-  constexpr ConcurrentMapBase() {}
-
-  ~ConcurrentMapBase() {
-    destroyTree(this->Root);
-  }
-
-private:
-  void destroyTree(const std::atomic<Node*> &edge) {
-    // This can be a relaxed load because destruction is not allowed to race
-    // with other operations.
-    auto node = edge.load(std::memory_order_relaxed);
-    if (!node) return;
-
-    // Destroy the node's children.
-    destroyTree(node->Left);
-    destroyTree(node->Right);
-
-    // Destroy the node itself.
-    this->destroyNode(node);
-  }
-};
-
-/// A concurrent map that is implemented using a binary tree. It supports
-/// concurrent insertions but does not support removals or rebalancing of
-/// the tree.
-///
-/// The entry type must provide the following operations:
-///
-///   /// For debugging purposes only. Summarize this key as an integer value.
-///   intptr_t getKeyIntValueForDump() const;
-///
-///   /// A ternary comparison. KeyTy is the type of the key provided
-///   /// to find or getOrInsert.
-///   int compareWithKey(KeyTy key) const;
-///
-///   /// Return the amount of extra trailing space required by an entry,
-///   /// where KeyTy is the type of the first argument to getOrInsert and
-///   /// ArgTys is the type of the remaining arguments.
-///   static size_t getExtraAllocationSize(KeyTy key, ArgTys...)
-///
-///   /// Return the amount of extra trailing space that was requested for
-///   /// this entry. This method is only used to compute the size of the
-///   /// object during node deallocation; it does not need to return a
-///   /// correct value so long as the allocator's Deallocate implementation
-///   /// ignores this argument.
-///   size_t getExtraAllocationSize() const;
-///
-/// If ProvideDestructor is false, the destructor will be trivial. This
-/// can be appropriate when the object is declared at global scope.
-template <class EntryTy, bool ProvideDestructor = true,
-          class Allocator = llvm::MallocAllocator>
-class ConcurrentMap
-    : private ConcurrentMapBase<EntryTy, ProvideDestructor, Allocator> {
-  using super = ConcurrentMapBase<EntryTy, ProvideDestructor, Allocator>;
-
-  using Node = typename super::Node;
-
-  /// Inherited from base class:
-  ///   std::atomic<Node*> Root;
-  using super::Root;
-
-  /// This member stores the address of the last node that was found by the
-  /// search procedure. We cache the last search to accelerate code that
-  /// searches the same value in a loop.
-  std::atomic<Node*> LastSearch;
-
-public:
-  constexpr ConcurrentMap() : LastSearch(nullptr) {}
-
-  ConcurrentMap(const ConcurrentMap &) = delete;
-  ConcurrentMap &operator=(const ConcurrentMap &) = delete;
-
-  // ConcurrentMap<T, false> must have a trivial destructor.
-  ~ConcurrentMap() = default;
-
-public:
-
-  Allocator &getAllocator() {
-    return *this;
-  }
-
-#ifndef NDEBUG
-  void dump() const {
-    auto R = Root.load(std::memory_order_acquire);
-    printf("digraph g {\n"
-           "graph [ rankdir = \"TB\" ];\n"
-           "node [ fontsize = \"16\" ];\n"
-           "edge [ ];\n");
-    if (R) {
-      R->dump();
-    }
-    printf("\n}\n");
-  }
-#endif
-
-  /// Search for a value by key \p Key.
-  /// \returns a pointer to the value or null if the value is not in the map.
-  template <class KeyTy>
-  EntryTy *find(const KeyTy &key) {
-    // Check if we are looking for the same key that we looked for in the last
-    // time we called this function.
-    if (Node *last = LastSearch.load(std::memory_order_acquire)) {
-      if (last->Payload.compareWithKey(key) == 0)
-        return &last->Payload;
-    }
-
-    // Search the tree, starting from the root.
-    Node *node = Root.load(std::memory_order_acquire);
-    while (node) {
-      int comparisonResult = node->Payload.compareWithKey(key);
-      if (comparisonResult == 0) {
-        LastSearch.store(node, std::memory_order_release);
-        return &node->Payload;
-      } else if (comparisonResult < 0) {
-        node = node->Left.load(std::memory_order_acquire);
-      } else {
-        node = node->Right.load(std::memory_order_acquire);
-      }
-    }
-
-    return nullptr;
-  }
-
-  /// Get or create an entry in the map.
-  ///
-  /// \returns the entry in the map and whether a new node was added (true)
-  ///   or already existed (false)
-  template <class KeyTy, class... ArgTys>
-  std::pair<EntryTy*, bool> getOrInsert(KeyTy key, ArgTys &&... args) {
-    // Check if we are looking for the same key that we looked for the
-    // last time we called this function.
-    if (Node *last = LastSearch.load(std::memory_order_acquire)) {
-      if (last && last->Payload.compareWithKey(key) == 0)
-        return { &last->Payload, false };
-    }
-
-    // The node we allocated.
-    Node *newNode = nullptr;
-
-    // Start from the root.
-    auto edge = &Root;
-
-    while (true) {
-      // Load the edge.
-      Node *node = edge->load(std::memory_order_acquire);
-
-      // If there's a node there, it's either a match or we're going to
-      // one of its children.
-      if (node) {
-      searchFromNode:
-
-        // Compare our key against the node's key.
-        int comparisonResult = node->Payload.compareWithKey(key);
-
-        // If it's equal, we can use this node.
-        if (comparisonResult == 0) {
-          // Destroy the node we allocated before if we're carrying one around.
-          if (newNode) this->destroyNode(newNode);
-
-          // Cache and report that we found an existing node.
-          LastSearch.store(node, std::memory_order_release);
-          return { &node->Payload, false };
-        }
-
-        // Otherwise, select the appropriate child edge and descend.
-        edge = (comparisonResult < 0 ? &node->Left : &node->Right);
-        continue;
-      }
-
-      // Create a new node.
-      if (!newNode) {
-        size_t allocSize =
-          sizeof(Node) + EntryTy::getExtraAllocationSize(key, args...);
-        void *memory = this->Allocate(allocSize, alignof(Node));
-        newNode = ::new (memory) Node(key, std::forward<ArgTys>(args)...);
-      }
-
-      // Try to set the edge to the new node.
-      if (std::atomic_compare_exchange_strong_explicit(edge, &node, newNode,
-                                                       std::memory_order_acq_rel,
-                                                       std::memory_order_acquire)) {
-        // If that succeeded, cache and report that we created a new node.
-        LastSearch.store(newNode, std::memory_order_release);
-        return { &newNode->Payload, true };
-      }
-
-      // Otherwise, we lost the race because some other thread initialized
-      // the edge before us. node will be set to the current value;
-      // repeat the search from there.
-      assert(node && "spurious failure from compare_exchange_strong?");
-      goto searchFromNode;
-    }
-  }
-};
-
 /// A simple linked list representing pointers that need to be freed. This is
 /// not a concurrent data structure, just a bit of support used in the types
 /// below.
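
For reference, a minimal sketch of an entry type that satisfies the contract documented for the removed ConcurrentMap. It is hypothetical and not taken from this change: the name StringEntry and its layout are illustrative. The key's characters live in the node's trailing space requested through getExtraAllocationSize, which is the variable-size-payload pattern the doc comment describes.

// Hypothetical entry type for use with ConcurrentMap<StringEntry>. The key's
// characters are stored directly after the entry, in the node's extra
// allocation.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <string>

struct StringEntry {
  size_t Length;

  char *chars() { return reinterpret_cast<char *>(this + 1); }
  const char *chars() const { return reinterpret_cast<const char *>(this + 1); }

  // getOrInsert constructs the entry with (key, args...); here there are no
  // extra arguments beyond the key.
  StringEntry(const std::string &key) : Length(key.size()) {
    std::memcpy(chars(), key.data(), Length);
  }

  // For debugging purposes only.
  intptr_t getKeyIntValueForDump() const {
    return static_cast<intptr_t>(Length);
  }

  // Ternary comparison against the key type passed to find/getOrInsert.
  int compareWithKey(const std::string &key) const {
    if (int result = std::memcmp(chars(), key.data(),
                                 std::min(Length, key.size())))
      return result;
    return Length == key.size() ? 0 : (Length < key.size() ? -1 : 1);
  }

  // Extra trailing bytes to allocate when creating a node for this key.
  static size_t getExtraAllocationSize(const std::string &key) {
    return key.size();
  }

  // Extra trailing bytes this entry occupies; used only at node deallocation.
  size_t getExtraAllocationSize() const { return Length; }
};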
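
And a usage sketch under the same assumptions (ConcurrentMap and StringEntry visible in this translation unit; namespace and include omitted), showing find, getOrInsert, and the ProvideDestructor = false form the doc comment recommends for objects declared at global scope:

// Hypothetical usage. With ProvideDestructor = false the map has a trivial
// destructor, so it is safe as a global with static storage duration.
static ConcurrentMap<StringEntry, /*ProvideDestructor=*/false> Names;

const StringEntry *internName(const std::string &name) {
  // getOrInsert returns {entry, insertedNewNode}. Losing the CAS race to
  // another thread simply yields that thread's entry with false.
  auto result = Names.getOrInsert(name);
  return result.first;
}

const StringEntry *lookupName(const std::string &name) {
  // find returns null if no entry with this key has been inserted yet.
  return Names.find(name);
}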