 
 namespace swift {
 
-template <typename RefCountBits>
-HeapObject *RefCounts<RefCountBits>::incrementSlow(RefCountBits oldbits,
+// Return an object's side table, allocating it if necessary.
+// Returns null if the object is deiniting.
+// SideTableRefCountBits specialization intentionally does not exist.
+template <>
+HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable(bool failIfDeiniting)
+{
+  auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
+
+  // Preflight failures before allocating a new side table.
+  if (oldbits.hasSideTable()) {
+    // Already have a side table. Return it.
+    return oldbits.getSideTable();
+  }
+  else if (failIfDeiniting && oldbits.getIsDeiniting()) {
+    // Already past the start of deinit. Do nothing.
+    return nullptr;
+  }
+
+  // Preflight passed. Allocate a side table.
+
+  // FIXME: custom side table allocator
+  auto side = swift_cxx_newObject<HeapObjectSideTableEntry>(getHeapObject());
+
+  auto newbits = InlineRefCountBits(side);
+
+  do {
+    if (oldbits.hasSideTable()) {
+      // Already have a side table. Return it and delete ours.
+      // Read before delete to streamline barriers.
+      auto result = oldbits.getSideTable();
+      swift_cxx_deleteObject(side);
+      return result;
+    }
+    else if (failIfDeiniting && oldbits.getIsDeiniting()) {
+      // Already past the start of deinit. Do nothing.
+      return nullptr;
+    }
+
+    side->initRefCounts(oldbits);
+
+  } while (! refCounts.compare_exchange_weak(oldbits, newbits,
+                                             std::memory_order_release,
+                                             std::memory_order_relaxed));
+
+  return side;
+}
+
+
+template <>
+HeapObject *RefCounts<InlineRefCountBits>::incrementSlow(InlineRefCountBits oldbits,
                                                    uint32_t n) {
   if (oldbits.isImmortal(false)) {
     return getHeapObject();
@@ -25,21 +73,28 @@ HeapObject *RefCounts<RefCountBits>::incrementSlow(RefCountBits oldbits,
     auto side = oldbits.getSideTable();
     side->incrementStrong(n);
   }
+  else {
+    // Overflow into a new side table.
+    auto side = allocateSideTable(false);
+    side->incrementStrong(n);
+  }
+  return getHeapObject();
+}
+template <>
+HeapObject *RefCounts<SideTableRefCountBits>::incrementSlow(SideTableRefCountBits oldbits,
+                                                            uint32_t n) {
+  if (oldbits.isImmortal(false)) {
+    return getHeapObject();
+  }
   else {
     // Retain count overflow.
     swift::swift_abortRetainOverflow();
   }
   return getHeapObject();
 }
-template HeapObject *
-RefCounts<InlineRefCountBits>::incrementSlow(InlineRefCountBits oldbits,
-                                             uint32_t n);
-template HeapObject *
-RefCounts<SideTableRefCountBits>::incrementSlow(SideTableRefCountBits oldbits,
-                                                uint32_t n);
 
-template <typename RefCountBits>
-void RefCounts<RefCountBits>::incrementNonAtomicSlow(RefCountBits oldbits,
+template <>
+void RefCounts<InlineRefCountBits>::incrementNonAtomicSlow(InlineRefCountBits oldbits,
                                                      uint32_t n) {
   if (oldbits.isImmortal(false)) {
     return;
@@ -48,12 +103,20 @@ void RefCounts<RefCountBits>::incrementNonAtomicSlow(RefCountBits oldbits,
     // Out-of-line slow path.
     auto side = oldbits.getSideTable();
     side->incrementStrong(n);  // FIXME: can there be a nonatomic impl?
+  } else {
+    // Overflow into a new side table.
+    auto side = allocateSideTable(false);
+    side->incrementStrong(n);  // FIXME: can there be a nonatomic impl?
+  }
+}
+template <>
+void RefCounts<SideTableRefCountBits>::incrementNonAtomicSlow(SideTableRefCountBits oldbits, uint32_t n) {
+  if (oldbits.isImmortal(false)) {
+    return;
   } else {
     swift::swift_abortRetainOverflow();
   }
 }
-template void RefCounts<InlineRefCountBits>::incrementNonAtomicSlow(InlineRefCountBits oldbits, uint32_t n);
-template void RefCounts<SideTableRefCountBits>::incrementNonAtomicSlow(SideTableRefCountBits oldbits, uint32_t n);
 
 template <typename RefCountBits>
 bool RefCounts<RefCountBits>::tryIncrementSlow(RefCountBits oldbits) {
@@ -81,53 +144,6 @@ bool RefCounts<RefCountBits>::tryIncrementNonAtomicSlow(RefCountBits oldbits) {
 template bool RefCounts<InlineRefCountBits>::tryIncrementNonAtomicSlow(InlineRefCountBits oldbits);
 template bool RefCounts<SideTableRefCountBits>::tryIncrementNonAtomicSlow(SideTableRefCountBits oldbits);
 
-// Return an object's side table, allocating it if necessary.
-// Returns null if the object is deiniting.
-// SideTableRefCountBits specialization intentionally does not exist.
-template <>
-HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable(bool failIfDeiniting)
-{
-  auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-
-  // Preflight failures before allocating a new side table.
-  if (oldbits.hasSideTable()) {
-    // Already have a side table. Return it.
-    return oldbits.getSideTable();
-  }
-  else if (failIfDeiniting && oldbits.getIsDeiniting()) {
-    // Already past the start of deinit. Do nothing.
-    return nullptr;
-  }
-
-  // Preflight passed. Allocate a side table.
-
-  // FIXME: custom side table allocator
-  auto side = swift_cxx_newObject<HeapObjectSideTableEntry>(getHeapObject());
-
-  auto newbits = InlineRefCountBits(side);
-
-  do {
-    if (oldbits.hasSideTable()) {
-      // Already have a side table. Return it and delete ours.
-      // Read before delete to streamline barriers.
-      auto result = oldbits.getSideTable();
-      swift_cxx_deleteObject(side);
-      return result;
-    }
-    else if (failIfDeiniting && oldbits.getIsDeiniting()) {
-      // Already past the start of deinit. Do nothing.
-      return nullptr;
-    }
-
-    side->initRefCounts(oldbits);
-
-  } while (! refCounts.compare_exchange_weak(oldbits, newbits,
-                                             std::memory_order_release,
-                                             std::memory_order_relaxed));
-  return side;
-}
-
-
 // SideTableRefCountBits specialization intentionally does not exist.
 template <>
 HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::formWeakReference()
@@ -183,6 +199,17 @@ bool RefCounts<InlineRefCountBits>::setIsImmutableCOWBuffer(bool immutable) {
 
 #endif
 
+template <typename RefCountBits>
+void RefCounts<RefCountBits>::dump() const {
+  printf("Location: %p\n", this);
+  printf("Strong Ref Count: %d.\n", getCount());
+  printf("Unowned Ref Count: %d.\n", getUnownedCount());
+  printf("Weak Ref Count: %d.\n", getWeakCount());
+  printf("RefCount Side Table: %p.\n", getSideTable());
+  printf("Is Deiniting: %s.\n", isDeiniting() ? "true" : "false");
+  printf("Is Immortal: %s.\n", refCounts.load().isImmortal(false) ? "true" : "false");
+}
+
 // namespace swift
 } // namespace swift
 
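The relocated allocateSideTable() is the heart of this change: with the new specializations, a retain-count overflow in the inline bits now spills into a lazily allocated side table instead of aborting (only the SideTableRefCountBits path still calls swift_abortRetainOverflow). The sketch below isolates that publication pattern in a small, self-contained C++ program: preflight the inline word, allocate a candidate entry, then install it with a compare_exchange_weak retry loop, deleting the candidate if another thread raced ahead. Everything here (RefCountsSketch, SideEntry, the tagged-pointer encoding, and the acquire load in place of SWIFT_MEMORY_ORDER_CONSUME) is a simplified stand-in chosen for illustration, not the runtime's actual InlineRefCountBits or HeapObjectSideTableEntry.

// lazy_side_table_sketch.cpp -- illustrative only; the types and memory
// orders are simplified stand-ins, not the Swift runtime's real ones.
#include <atomic>
#include <cassert>
#include <cstdint>
#include <cstdio>

struct SideEntry {             // stand-in for HeapObjectSideTableEntry
  uintptr_t inheritedBits = 0; // state copied out of the inline word
};

class RefCountsSketch {
  // Low bit set => the remaining bits hold a SideEntry pointer.
  std::atomic<uintptr_t> bits{0};

  static bool hasSide(uintptr_t w) { return (w & 1) != 0; }
  static SideEntry *sideOf(uintptr_t w) {
    return reinterpret_cast<SideEntry *>(w & ~uintptr_t(1));
  }

public:
  // Mirrors the shape of allocateSideTable(): preflight, allocate a
  // candidate, then publish it with a CAS loop, deleting it if we lose.
  SideEntry *allocateSideTable() {
    uintptr_t old = bits.load(std::memory_order_acquire);

    // Preflight: another thread may already have installed an entry.
    if (hasSide(old))
      return sideOf(old);

    auto *side = new SideEntry();
    uintptr_t desired = reinterpret_cast<uintptr_t>(side) | 1;

    do {
      if (hasSide(old)) {
        // Lost the race: adopt the winner's entry and drop ours.
        SideEntry *winner = sideOf(old);
        delete side;
        return winner;
      }
      // Refresh the candidate from the latest inline state before retrying.
      side->inheritedBits = old;
    } while (!bits.compare_exchange_weak(old, desired,
                                         std::memory_order_release,
                                         std::memory_order_relaxed));
    return side;
  }
};

int main() {
  RefCountsSketch rc;
  SideEntry *a = rc.allocateSideTable();
  SideEntry *b = rc.allocateSideTable(); // second call reuses the same entry
  assert(a == b);
  std::printf("side entry installed at %p\n", static_cast<void *>(a));
  return 0; // the entry is intentionally leaked in this tiny demo
}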