#ifndef Py_INTERNAL_OBJECT_H
#define Py_INTERNAL_OBJECT_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
#  error "this header requires Py_BUILD_CORE define"
#endif

#include <stdbool.h>
#include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
#include "pycore_emscripten_trampoline.h" // _PyCFunction_TrampolineCall()
#include "pycore_interp.h"        // PyInterpreterState.gc
#include "pycore_pyatomic_ft_wrappers.h"  // FT_ATOMIC_STORE_PTR_RELAXED
#include "pycore_pystate.h"       // _PyInterpreterState_GET()


#define _Py_IMMORTAL_REFCNT_LOOSE ((_Py_IMMORTAL_REFCNT >> 1) + 1)

// gh-121528, gh-118997: Similar to _Py_IsImmortal(), but looser when
// comparing the reference count, to stay compatible with C extensions built
// with the stable ABI of Python 3.11 or older. Such extensions implement
// INCREF/DECREF as refcnt++ and refcnt-- without taking immortal objects
// into account. For example, the reference count of an immortal object can
// change from _Py_IMMORTAL_REFCNT to _Py_IMMORTAL_REFCNT+1 (INCREF) or
// _Py_IMMORTAL_REFCNT-1 (DECREF).
//
// This function should only be used in assertions. Otherwise, _Py_IsImmortal()
// must be used instead.
static inline int _Py_IsImmortalLoose(PyObject *op)
{
#if defined(Py_GIL_DISABLED)
    return _Py_IsImmortal(op);
#else
    return (op->ob_refcnt >= _Py_IMMORTAL_REFCNT_LOOSE);
#endif
}
#define _Py_IsImmortalLoose(op) _Py_IsImmortalLoose(_PyObject_CAST(op))
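// Illustrative usage (a sketch, not part of this header): the loose check
// belongs in assertions that must tolerate refcounts perturbed by old
// stable-ABI extensions, e.g.:
//
//   assert(_Py_IsImmortalLoose(Py_None));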


/* Check if an object is consistent. For example, ensure that the reference
   counter is greater than or equal to 1, and ensure that ob_type is not NULL.

   Call _PyObject_AssertFailed() if the object is inconsistent.

   If check_content is zero, only check the header fields to reduce overhead.

   The function always returns 1. The return value only exists to allow
   writing:

   assert(_PyObject_CheckConsistency(obj, 1)); */
extern int _PyObject_CheckConsistency(PyObject *op, int check_content);

extern void _PyDebugAllocatorStats(FILE *out, const char *block_name,
                                   int num_blocks, size_t sizeof_block);

extern void _PyObject_DebugTypeStats(FILE *out);

#ifdef Py_TRACE_REFS
// Forget a reference registered by _Py_NewReference(). The function is
// called by _Py_Dealloc().
//
// When using a free list, the function can be called before modifying an
// object, to remove it from the set of traced objects. _Py_NewReference()
// or _Py_NewReferenceNoTotal() should then be called again on the object
// to trace it again.
extern void _Py_ForgetReference(PyObject *);
#endif

// Export for shared _testinternalcapi extension
PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *);

/* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid
   designated initializer conflicts in C++20. If we use the definition in
   object.h, we will be mixing designated and non-designated initializers in
   pycore objects, which is forbidden in C++20. However, if we then use
   designated initializers in object.h, extensions that do not use them break.
   Furthermore, we can't use designated initializers in extensions since they
   are not supported before C++20. Thus, keeping an internal copy here is the
   most backwards-compatible solution. */
#if defined(Py_GIL_DISABLED)
#define _PyObject_HEAD_INIT(type)                   \
    {                                               \
        .ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL,  \
        .ob_type = (type)                           \
    }
#else
#define _PyObject_HEAD_INIT(type)         \
    {                                     \
        .ob_refcnt = _Py_IMMORTAL_REFCNT, \
        .ob_type = (type)                 \
    }
#endif
#define _PyVarObject_HEAD_INIT(type, size)    \
    {                                         \
        .ob_base = _PyObject_HEAD_INIT(type), \
        .ob_size = size                       \
    }
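// Illustrative usage (a sketch; Example_Type is a hypothetical type): a
// static, immortal core object can be defined with the internal macro, e.g.:
//
//   static PyObject _Example_Struct = _PyObject_HEAD_INIT(&Example_Type);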

PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc(
    const char *func,
    const char *message);

#define _Py_FatalRefcountError(message) \
    _Py_FatalRefcountErrorFunc(__func__, (message))

#define _PyReftracerTrack(obj, operation) \
    do { \
        struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; \
        if (tracer->tracer_func != NULL) { \
            void *data = tracer->tracer_data; \
            tracer->tracer_func((obj), (operation), data); \
        } \
    } while(0)

#ifdef Py_REF_DEBUG
/* The symbol is only exposed in the API for the sake of extensions
   built against the pre-3.12 stable ABI. */
PyAPI_DATA(Py_ssize_t) _Py_RefTotal;

extern void _Py_AddRefTotal(PyThreadState *, Py_ssize_t);
extern void _Py_IncRefTotal(PyThreadState *);
extern void _Py_DecRefTotal(PyThreadState *);

#  define _Py_DEC_REFTOTAL(interp) \
    interp->object_state.reftotal--
#endif

// Increment the reference count of the object by n.
static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
{
    if (_Py_IsImmortal(op)) {
        return;
    }
#ifdef Py_REF_DEBUG
    _Py_AddRefTotal(_PyThreadState_GET(), n);
#endif
#if !defined(Py_GIL_DISABLED)
    op->ob_refcnt += n;
#else
    if (_Py_IsOwnedByCurrentThread(op)) {
        uint32_t local = op->ob_ref_local;
        Py_ssize_t refcnt = (Py_ssize_t)local + n;
#  if PY_SSIZE_T_MAX > UINT32_MAX
        if (refcnt > (Py_ssize_t)UINT32_MAX) {
            // Make the object immortal if the 32-bit local reference count
            // would overflow.
            refcnt = _Py_IMMORTAL_REFCNT_LOCAL;
        }
#  endif
        _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, (uint32_t)refcnt);
    }
    else {
        _Py_atomic_add_ssize(&op->ob_ref_shared, (n << _Py_REF_SHARED_SHIFT));
    }
#endif
    // Although the reference count was increased by `n` (which may be greater
    // than 1), it is only a single addition operation, so only one refcnt
    // increment is counted in the statistics.
    _Py_INCREF_STAT_INC();
}
#define _Py_RefcntAdd(op, n) _Py_RefcntAdd(_PyObject_CAST(op), n)
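// Illustrative usage (a sketch; `dest` and `item` are hypothetical): when a
// container stores n references to the same object, one bulk increment is
// cheaper than n separate Py_INCREF() calls:
//
//   _Py_RefcntAdd(item, n);
//   for (Py_ssize_t i = 0; i < n; i++) {
//       dest[i] = item;
//   }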

extern void _Py_SetImmortal(PyObject *op);
extern void _Py_SetImmortalUntracked(PyObject *op);

// Checks if an object has a single, unique reference. If the caller holds a
// unique reference, it may be able to safely modify the object in-place.
static inline int
_PyObject_IsUniquelyReferenced(PyObject *ob)
{
#if !defined(Py_GIL_DISABLED)
    return Py_REFCNT(ob) == 1;
#else
    // NOTE: the entire ob_ref_shared field must be zero, including flags, to
    // ensure that other threads cannot concurrently create new references to
    // this object.
    return (_Py_IsOwnedByCurrentThread(ob) &&
            _Py_atomic_load_uint32_relaxed(&ob->ob_ref_local) == 1 &&
            _Py_atomic_load_ssize_relaxed(&ob->ob_ref_shared) == 0);
#endif
}
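// Illustrative usage (a sketch): only mutate in place when no other
// reference can observe the change:
//
//   if (_PyObject_IsUniquelyReferenced(obj)) {
//       /* safe to modify obj in place */
//   }
//   else {
//       /* copy first, then modify the copy */
//   }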

// Makes an immortal object mortal again with the specified refcnt. Should only
// be used during runtime finalization.
static inline void _Py_SetMortal(PyObject *op, Py_ssize_t refcnt)
{
    if (op) {
        assert(_Py_IsImmortalLoose(op));
#ifdef Py_GIL_DISABLED
        op->ob_tid = _Py_UNOWNED_TID;
        op->ob_ref_local = 0;
        op->ob_ref_shared = _Py_REF_SHARED(refcnt, _Py_REF_MERGED);
#else
        op->ob_refcnt = refcnt;
#endif
    }
}

/* _Py_ClearImmortal() should only be used during runtime finalization. */
static inline void _Py_ClearImmortal(PyObject *op)
{
    if (op) {
        _Py_SetMortal(op, 1);
        Py_DECREF(op);
    }
}
#define _Py_ClearImmortal(op) \
    do { \
        _Py_ClearImmortal(_PyObject_CAST(op)); \
        op = NULL; \
    } while (0)

// Mark an object as supporting deferred reference counting. This is a no-op
// in the default (with GIL) build. Objects that use deferred reference
// counting should be tracked by the GC so that they are eventually collected.
extern void _PyObject_SetDeferredRefcount(PyObject *op);

static inline int
_PyObject_HasDeferredRefcount(PyObject *op)
{
#ifdef Py_GIL_DISABLED
    return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_DEFERRED);
#else
    return 0;
#endif
}

#if !defined(Py_GIL_DISABLED)
static inline void
_Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
{
    if (_Py_IsImmortal(op)) {
        return;
    }
    _Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
    _Py_DEC_REFTOTAL(PyInterpreterState_Get());
#endif
    if (--op->ob_refcnt != 0) {
        assert(op->ob_refcnt > 0);
    }
    else {
#ifdef Py_TRACE_REFS
        _Py_ForgetReference(op);
#endif
        _PyReftracerTrack(op, PyRefTracer_DESTROY);
        destruct(op);
    }
}

static inline void
_Py_DECREF_NO_DEALLOC(PyObject *op)
{
    if (_Py_IsImmortal(op)) {
        return;
    }
    _Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
    _Py_DEC_REFTOTAL(PyInterpreterState_Get());
#endif
    op->ob_refcnt--;
#ifdef Py_DEBUG
    if (op->ob_refcnt <= 0) {
        _Py_FatalRefcountError("Expected a positive remaining refcount");
    }
#endif
}

#else
// TODO: implement Py_DECREF specializations for Py_GIL_DISABLED build
static inline void
_Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
{
    Py_DECREF(op);
}

static inline void
_Py_DECREF_NO_DEALLOC(PyObject *op)
{
    Py_DECREF(op);
}

static inline int
_Py_REF_IS_MERGED(Py_ssize_t ob_ref_shared)
{
    return (ob_ref_shared & _Py_REF_SHARED_FLAG_MASK) == _Py_REF_MERGED;
}

static inline int
_Py_REF_IS_QUEUED(Py_ssize_t ob_ref_shared)
{
    return (ob_ref_shared & _Py_REF_SHARED_FLAG_MASK) == _Py_REF_QUEUED;
}

// Merge the local and shared reference count fields and add `extra` to the
// refcount when merging.
Py_ssize_t _Py_ExplicitMergeRefcount(PyObject *op, Py_ssize_t extra);
#endif // !defined(Py_GIL_DISABLED)

#ifdef Py_REF_DEBUG
#  undef _Py_DEC_REFTOTAL
#endif


extern int _PyType_CheckConsistency(PyTypeObject *type);
extern int _PyDict_CheckConsistency(PyObject *mp, int check_content);

/* Update the Python traceback of an object. This function must be called
   when a memory block is reused from a free list.

   Internal function called by _Py_NewReference(). */
extern int _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void*);

// Fast inlined version of PyType_HasFeature()
static inline int
_PyType_HasFeature(PyTypeObject *type, unsigned long feature) {
    return ((FT_ATOMIC_LOAD_ULONG_RELAXED(type->tp_flags) & feature) != 0);
}

extern void _PyType_InitCache(PyInterpreterState *interp);

extern PyStatus _PyObject_InitState(PyInterpreterState *interp);
extern void _PyObject_FiniState(PyInterpreterState *interp);
extern bool _PyRefchain_IsTraced(PyInterpreterState *interp, PyObject *obj);

/* Inline functions trading binary compatibility for speed:
   _PyObject_Init() is the fast version of PyObject_Init(), and
   _PyObject_InitVar() is the fast version of PyObject_InitVar().

   These inline functions must not be called with op=NULL. */
static inline void
_PyObject_Init(PyObject *op, PyTypeObject *typeobj)
{
    assert(op != NULL);
    Py_SET_TYPE(op, typeobj);
    assert(_PyType_HasFeature(typeobj, Py_TPFLAGS_HEAPTYPE) || _Py_IsImmortalLoose(typeobj));
    Py_INCREF(typeobj);
    _Py_NewReference(op);
}

static inline void
_PyObject_InitVar(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size)
{
    assert(op != NULL);
    assert(typeobj != &PyLong_Type);
    _PyObject_Init((PyObject *)op, typeobj);
    Py_SET_SIZE(op, size);
}
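// Illustrative usage (a sketch; ExampleObject and Example_Type are
// hypothetical): a core fast path pairs a raw allocation with _PyObject_Init():
//
//   ExampleObject *op = PyObject_Malloc(sizeof(ExampleObject));
//   if (op == NULL) {
//       return PyErr_NoMemory();
//   }
//   _PyObject_Init((PyObject *)op, &Example_Type);
//   return (PyObject *)op;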


/* Tell the GC to track this object.
 *
 * The object must not already be tracked by the GC.
 *
 * NB: While the object is tracked by the collector, it must be safe to call
 * its tp_traverse method.
 *
 * Internal note: interp->gc.generation0->_gc_prev doesn't have any bit flags
 * because it's not an object header.  So we don't use _PyGCHead_PREV() and
 * _PyGCHead_SET_PREV() for it, to avoid unnecessary bitwise operations.
 *
 * See also the public PyObject_GC_Track() function.
 */
static inline void _PyObject_GC_TRACK(
// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
#ifndef NDEBUG
    const char *filename, int lineno,
#endif
    PyObject *op)
{
    _PyObject_ASSERT_FROM(op, !_PyObject_GC_IS_TRACKED(op),
                          "object already tracked by the garbage collector",
                          filename, lineno, __func__);
#ifdef Py_GIL_DISABLED
    _PyObject_SET_GC_BITS(op, _PyGC_BITS_TRACKED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    _PyObject_ASSERT_FROM(op,
                          (gc->_gc_prev & _PyGC_PREV_MASK_COLLECTING) == 0,
                          "object is in generation which is garbage collected",
                          filename, lineno, __func__);

    PyInterpreterState *interp = _PyInterpreterState_GET();
    PyGC_Head *generation0 = interp->gc.generation0;
    PyGC_Head *last = (PyGC_Head*)(generation0->_gc_prev);
    _PyGCHead_SET_NEXT(last, gc);
    _PyGCHead_SET_PREV(gc, last);
    _PyGCHead_SET_NEXT(gc, generation0);
    generation0->_gc_prev = (uintptr_t)gc;
#endif
}

/* Tell the GC to stop tracking this object.
 *
 * Internal note: this may be called during garbage collection, so the
 * _PyGC_PREV_MASK_COLLECTING bit must be cleared, while the
 * _PyGC_PREV_MASK_FINALIZED bit is kept.
 *
 * The object must be tracked by the GC.
 *
 * See also the public PyObject_GC_UnTrack(), which accepts an object that
 * is not tracked.
 */
static inline void _PyObject_GC_UNTRACK(
// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
#ifndef NDEBUG
    const char *filename, int lineno,
#endif
    PyObject *op)
{
    _PyObject_ASSERT_FROM(op, _PyObject_GC_IS_TRACKED(op),
                          "object not tracked by the garbage collector",
                          filename, lineno, __func__);

#ifdef Py_GIL_DISABLED
    _PyObject_CLEAR_GC_BITS(op, _PyGC_BITS_TRACKED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    PyGC_Head *prev = _PyGCHead_PREV(gc);
    PyGC_Head *next = _PyGCHead_NEXT(gc);
    _PyGCHead_SET_NEXT(prev, next);
    _PyGCHead_SET_PREV(next, prev);
    gc->_gc_next = 0;
    gc->_gc_prev &= _PyGC_PREV_MASK_FINALIZED;
#endif
}

// Macros that accept any type for the parameter, and that automatically pass
// the filename and the line number (if NDEBUG is not defined) where the macro
// is called.
#ifdef NDEBUG
#  define _PyObject_GC_TRACK(op) \
        _PyObject_GC_TRACK(_PyObject_CAST(op))
#  define _PyObject_GC_UNTRACK(op) \
        _PyObject_GC_UNTRACK(_PyObject_CAST(op))
#else
#  define _PyObject_GC_TRACK(op) \
        _PyObject_GC_TRACK(__FILE__, __LINE__, _PyObject_CAST(op))
#  define _PyObject_GC_UNTRACK(op) \
        _PyObject_GC_UNTRACK(__FILE__, __LINE__, _PyObject_CAST(op))
#endif
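// Illustrative usage (a sketch; op->item is a hypothetical field): a
// constructor fills in all fields before letting the collector see the
// object, since it must be safe to traverse once tracked:
//
//   op->item = item;
//   _PyObject_GC_TRACK(op);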

#ifdef Py_GIL_DISABLED

/* Tries to increment an object's reference count
 *
 * This is a specialized version of _Py_TryIncref that only succeeds if the
 * object is immortal or local to this thread. It does not handle the case
 * where the reference count modification requires an atomic operation. This
 * allows call sites to specialize for the immortal/local case.
 */
static inline int
_Py_TryIncrefFast(PyObject *op) {
    uint32_t local = _Py_atomic_load_uint32_relaxed(&op->ob_ref_local);
    local += 1;
    if (local == 0) {
        // immortal: ob_ref_local is _Py_IMMORTAL_REFCNT_LOCAL (UINT32_MAX),
        // so the increment wrapped around to zero.
        return 1;
    }
    if (_Py_IsOwnedByCurrentThread(op)) {
        _Py_INCREF_STAT_INC();
        _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, local);
#ifdef Py_REF_DEBUG
        _Py_IncRefTotal(_PyThreadState_GET());
#endif
        return 1;
    }
    return 0;
}

static inline int
_Py_TryIncRefShared(PyObject *op)
{
    Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
    for (;;) {
        // If the shared refcount is zero and the object is either merged
        // or may not have weak references, then we cannot incref it.
        if (shared == 0 || shared == _Py_REF_MERGED) {
            return 0;
        }

        if (_Py_atomic_compare_exchange_ssize(
                &op->ob_ref_shared,
                &shared,
                shared + (1 << _Py_REF_SHARED_SHIFT))) {
#ifdef Py_REF_DEBUG
            _Py_IncRefTotal(_PyThreadState_GET());
#endif
            _Py_INCREF_STAT_INC();
            return 1;
        }
    }
}

/* Tries to incref the object op and ensures that *src still points to it. */
static inline int
_Py_TryIncrefCompare(PyObject **src, PyObject *op)
{
    if (_Py_TryIncrefFast(op)) {
        return 1;
    }
    if (!_Py_TryIncRefShared(op)) {
        return 0;
    }
    if (op != _Py_atomic_load_ptr(src)) {
        Py_DECREF(op);
        return 0;
    }
    return 1;
}

/* Loads and increfs an object from ptr, which may contain a NULL value.
   Safe with concurrent (atomic) updates to ptr.
   NOTE: The writer must set maybe-weakref on the stored object! */
static inline PyObject *
_Py_XGetRef(PyObject **ptr)
{
    for (;;) {
        PyObject *value = _Py_atomic_load_ptr(ptr);
        if (value == NULL) {
            return value;
        }
        if (_Py_TryIncrefCompare(ptr, value)) {
            return value;
        }
    }
}

/* Attempts to load and incref an object from ptr. Returns NULL
   on failure, which may be due to a NULL value or a concurrent update. */
static inline PyObject *
_Py_TryXGetRef(PyObject **ptr)
{
    PyObject *value = _Py_atomic_load_ptr(ptr);
    if (value == NULL) {
        return value;
    }
    if (_Py_TryIncrefCompare(ptr, value)) {
        return value;
    }
    return NULL;
}
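// Illustrative usage (a sketch; `self->cached` is a hypothetical field):
// safely read a strong reference out of a field that another thread may be
// concurrently replacing. _Py_XGetRef() retries until it succeeds, while
// _Py_TryXGetRef() makes a single attempt:
//
//   PyObject *v = _Py_XGetRef(&self->cached);
//   if (v != NULL) {
//       /* v is a new strong reference */
//       Py_DECREF(v);
//   }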

/* Like Py_NewRef but also optimistically sets _Py_REF_MAYBE_WEAKREF
   on objects owned by a different thread. */
static inline PyObject *
_Py_NewRefWithLock(PyObject *op)
{
    if (_Py_TryIncrefFast(op)) {
        return op;
    }
#ifdef Py_REF_DEBUG
    _Py_IncRefTotal(_PyThreadState_GET());
#endif
    _Py_INCREF_STAT_INC();
    for (;;) {
        Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
        Py_ssize_t new_shared = shared + (1 << _Py_REF_SHARED_SHIFT);
        if ((shared & _Py_REF_SHARED_FLAG_MASK) == 0) {
            new_shared |= _Py_REF_MAYBE_WEAKREF;
        }
        if (_Py_atomic_compare_exchange_ssize(
                &op->ob_ref_shared,
                &shared,
                new_shared)) {
            return op;
        }
    }
}

static inline PyObject *
_Py_XNewRefWithLock(PyObject *obj)
{
    if (obj == NULL) {
        return NULL;
    }
    return _Py_NewRefWithLock(obj);
}

static inline void
_PyObject_SetMaybeWeakref(PyObject *op)
{
    if (_Py_IsImmortal(op)) {
        return;
    }
    for (;;) {
        Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
        if ((shared & _Py_REF_SHARED_FLAG_MASK) != 0) {
            // Nothing to do if it's in WEAKREFS, QUEUED, or MERGED states.
            return;
        }
        if (_Py_atomic_compare_exchange_ssize(
                &op->ob_ref_shared, &shared, shared | _Py_REF_MAYBE_WEAKREF)) {
            return;
        }
    }
}

extern int _PyObject_ResurrectEndSlow(PyObject *op);
#endif

// Temporarily resurrects an object during deallocation. The refcount is set
// to one.
static inline void
_PyObject_ResurrectStart(PyObject *op)
{
    assert(Py_REFCNT(op) == 0);
#ifdef Py_REF_DEBUG
    _Py_IncRefTotal(_PyThreadState_GET());
#endif
#ifdef Py_GIL_DISABLED
    _Py_atomic_store_uintptr_relaxed(&op->ob_tid, _Py_ThreadId());
    _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, 1);
    _Py_atomic_store_ssize_relaxed(&op->ob_ref_shared, 0);
#else
    Py_SET_REFCNT(op, 1);
#endif
}

// Undoes an object resurrection by decrementing the refcount without calling
// _Py_Dealloc(). Returns 0 if the object is dead (the normal case), and
// deallocation should continue. Returns 1 if the object is still alive.
static inline int
_PyObject_ResurrectEnd(PyObject *op)
{
#ifdef Py_REF_DEBUG
    _Py_DecRefTotal(_PyThreadState_GET());
#endif
#ifndef Py_GIL_DISABLED
    Py_SET_REFCNT(op, Py_REFCNT(op) - 1);
    return Py_REFCNT(op) != 0;
#else
    uint32_t local = _Py_atomic_load_uint32_relaxed(&op->ob_ref_local);
    Py_ssize_t shared = _Py_atomic_load_ssize_acquire(&op->ob_ref_shared);
    if (_Py_IsOwnedByCurrentThread(op) && local == 1 && shared == 0) {
        // Fast-path: object has a single refcount and is owned by this thread
        _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, 0);
        return 0;
    }
    // Slow-path: object has a shared refcount or is not owned by this thread
    return _PyObject_ResurrectEndSlow(op);
#endif
}
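// Illustrative usage (a sketch; example_dealloc is hypothetical): a
// deallocator that runs code which may create new references to the dying
// object brackets that code with the resurrection pair:
//
//   static void example_dealloc(PyObject *self)
//   {
//       _PyObject_ResurrectStart(self);
//       /* ... run a finalizer that may incref self ... */
//       if (_PyObject_ResurrectEnd(self)) {
//           return;  // resurrected: another reference keeps it alive
//       }
//       /* ... actually free the object ... */
//   }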

/* Tries to incref op and returns 1 if successful or 0 otherwise. */
static inline int
_Py_TryIncref(PyObject *op)
{
#ifdef Py_GIL_DISABLED
    return _Py_TryIncrefFast(op) || _Py_TryIncRefShared(op);
#else
    if (Py_REFCNT(op) > 0) {
        Py_INCREF(op);
        return 1;
    }
    return 0;
#endif
}
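// Illustrative usage (a sketch): upgrade a reference that may be racing with
// deallocation (e.g. in the free-threaded build) into a strong reference:
//
//   if (_Py_TryIncref(op)) {
//       /* we now own a strong reference to op */
//       Py_DECREF(op);
//   }
//   else {
//       /* op is being destroyed; do not use it */
//   }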

#ifdef Py_REF_DEBUG
extern void _PyInterpreterState_FinalizeRefTotal(PyInterpreterState *);
extern void _Py_FinalizeRefTotal(_PyRuntimeState *);
extern void _PyDebug_PrintTotalRefs(void);
#endif

#ifdef Py_TRACE_REFS
extern void _Py_AddToAllObjects(PyObject *op);
extern void _Py_PrintReferences(PyInterpreterState *, FILE *);
extern void _Py_PrintReferenceAddresses(PyInterpreterState *, FILE *);
#endif


/* Return the *address* of the object's weaklist.  The address may be
 * dereferenced to get the current head of the weaklist.  This is useful
 * for iterating over the linked list of weakrefs, especially when the
 * list is being modified externally (e.g. refs getting removed).
 *
 * The returned pointer should not be used to change the head of the list
 * nor should it be used to add, remove, or swap any refs in the list.
 * That is the sole responsibility of the code in weakrefobject.c.
 */
static inline PyObject **
_PyObject_GET_WEAKREFS_LISTPTR(PyObject *op)
{
    if (PyType_Check(op) &&
            ((PyTypeObject *)op)->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) {
        PyInterpreterState *interp = _PyInterpreterState_GET();
        managed_static_type_state *state = _PyStaticType_GetState(
                                                interp, (PyTypeObject *)op);
        return _PyStaticType_GET_WEAKREFS_LISTPTR(state);
    }
    // Essentially _PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET():
    Py_ssize_t offset = Py_TYPE(op)->tp_weaklistoffset;
    return (PyObject **)((char *)op + offset);
}

/* This is a special case of _PyObject_GET_WEAKREFS_LISTPTR().
 * Only the most fundamental lookup path is used.
 * Consequently, static types should not be used.
 *
 * For static builtin types the returned pointer will always point
 * to a NULL tp_weaklist.  This is fine for any deallocation cases,
 * since static types are never deallocated and static builtin types
 * are only finalized at the end of runtime finalization.
 *
 * If the weaklist for static types is actually needed then use
 * _PyObject_GET_WEAKREFS_LISTPTR().
 */
static inline PyWeakReference **
_PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET(PyObject *op)
{
    assert(!PyType_Check(op) ||
            ((PyTypeObject *)op)->tp_flags & Py_TPFLAGS_HEAPTYPE);
    Py_ssize_t offset = Py_TYPE(op)->tp_weaklistoffset;
    return (PyWeakReference **)((char *)op + offset);
}

// Fast inlined version of PyObject_IS_GC()
static inline int
_PyObject_IS_GC(PyObject *obj)
{
    PyTypeObject *type = Py_TYPE(obj);
    return (PyType_IS_GC(type)
            && (type->tp_is_gc == NULL || type->tp_is_gc(obj)));
}

// Fast inlined version of PyObject_Hash()
static inline Py_hash_t
_PyObject_HashFast(PyObject *op)
{
    if (PyUnicode_CheckExact(op)) {
        Py_hash_t hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(
                             _PyASCIIObject_CAST(op)->hash);
        if (hash != -1) {
            return hash;
        }
    }
    return PyObject_Hash(op);
}
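// Illustrative usage (a sketch; `key` is hypothetical): hash a key before a
// lookup; -1 signals an error with an exception set, as for PyObject_Hash():
//
//   Py_hash_t hash = _PyObject_HashFast(key);
//   if (hash == -1) {
//       return NULL;
//   }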

// Fast inlined version of PyType_IS_GC()
#define _PyType_IS_GC(t) _PyType_HasFeature((t), Py_TPFLAGS_HAVE_GC)

static inline size_t
_PyType_PreHeaderSize(PyTypeObject *tp)
{
    return (
#ifndef Py_GIL_DISABLED
        _PyType_IS_GC(tp) * sizeof(PyGC_Head) +
#endif
        _PyType_HasFeature(tp, Py_TPFLAGS_PREHEADER) * 2 * sizeof(PyObject *)
    );
}

void _PyObject_GC_Link(PyObject *op);

// Usage: assert(_Py_CheckSlotResult(obj, "__getitem__", result != NULL));
extern int _Py_CheckSlotResult(
    PyObject *obj,
    const char *slot_name,
    int success);

// Test if a type supports weak references
static inline int _PyType_SUPPORTS_WEAKREFS(PyTypeObject *type) {
    return (type->tp_weaklistoffset != 0);
}

extern PyObject* _PyType_AllocNoTrack(PyTypeObject *type, Py_ssize_t nitems);
extern PyObject *_PyType_NewManagedObject(PyTypeObject *type);

extern PyTypeObject* _PyType_CalculateMetaclass(PyTypeObject *, PyObject *);
extern PyObject* _PyType_GetDocFromInternalDoc(const char *, const char *);
extern PyObject* _PyType_GetTextSignatureFromInternalDoc(const char *, const char *, int);
extern int _PyObject_SetAttributeErrorContext(PyObject *v, PyObject* name);

void _PyObject_InitInlineValues(PyObject *obj, PyTypeObject *tp);
extern int _PyObject_StoreInstanceAttribute(PyObject *obj,
                                            PyObject *name, PyObject *value);
extern bool _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name,
                                              PyObject **attr);

#ifdef Py_GIL_DISABLED
#  define MANAGED_DICT_OFFSET    (((Py_ssize_t)sizeof(PyObject *))*-1)
#  define MANAGED_WEAKREF_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-2)
#else
#  define MANAGED_DICT_OFFSET    (((Py_ssize_t)sizeof(PyObject *))*-3)
#  define MANAGED_WEAKREF_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-4)
#endif

typedef union {
    PyDictObject *dict;
} PyManagedDictPointer;

static inline PyManagedDictPointer *
_PyObject_ManagedDictPointer(PyObject *obj)
{
    assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
    return (PyManagedDictPointer *)((char *)obj + MANAGED_DICT_OFFSET);
}

static inline PyDictObject *
_PyObject_GetManagedDict(PyObject *obj)
{
    PyManagedDictPointer *dorv = _PyObject_ManagedDictPointer(obj);
    return (PyDictObject *)FT_ATOMIC_LOAD_PTR_ACQUIRE(dorv->dict);
}

static inline PyDictValues *
_PyObject_InlineValues(PyObject *obj)
{
    assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
    assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
    assert(Py_TYPE(obj)->tp_basicsize == sizeof(PyObject));
    return (PyDictValues *)((char *)obj + sizeof(PyObject));
}

extern PyObject ** _PyObject_ComputedDictPointer(PyObject *);
extern int _PyObject_IsInstanceDictEmpty(PyObject *);

// Export for 'math' shared extension
PyAPI_FUNC(PyObject*) _PyObject_LookupSpecial(PyObject *, PyObject *);

extern int _PyObject_IsAbstract(PyObject *);

PyAPI_FUNC(int) _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method);
extern PyObject* _PyObject_NextNotImplemented(PyObject *);

// Pickle support.
// Export for '_datetime' shared extension
PyAPI_FUNC(PyObject*) _PyObject_GetState(PyObject *);

/* C function call trampolines to mitigate bad function pointer casts.
 *
 * Typical native ABIs ignore additional arguments or fill in missing
 * values with 0/NULL in a function pointer cast. Compilers do not emit
 * warnings when a function pointer is explicitly cast to an
 * incompatible type.
 *
 * Bad fpcasts are an issue in WebAssembly: WASM's indirect_call has strict
 * function signature checks. Argument count, types, and return type must
 * all match.
 *
 * Third-party code unintentionally relies on problematic fpcasts. The call
 * trampoline mitigates common occurrences of bad fpcasts on Emscripten.
 */
#if !(defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE))
#define _PyCFunction_TrampolineCall(meth, self, args) \
    (meth)((self), (args))
#define _PyCFunctionWithKeywords_TrampolineCall(meth, self, args, kw) \
    (meth)((self), (args), (kw))
#endif // __EMSCRIPTEN__ && PY_CALL_TRAMPOLINE

// Export these 2 symbols for '_pickle' shared extension
PyAPI_DATA(PyTypeObject) _PyNone_Type;
PyAPI_DATA(PyTypeObject) _PyNotImplemented_Type;

// Maps Py_LT to Py_GT, ..., Py_GE to Py_LE.
// Export for the stable ABI.
PyAPI_DATA(int) _Py_SwappedOp[];

extern void _Py_GetConstant_Init(void);

#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_OBJECT_H */