Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2025-11-19 09:50:46

0001 #ifndef Py_INTERNAL_GC_H
0002 #define Py_INTERNAL_GC_H
0003 #ifdef __cplusplus
0004 extern "C" {
0005 #endif
0006 
0007 #ifndef Py_BUILD_CORE
0008 #  error "this header requires Py_BUILD_CORE define"
0009 #endif
0010 
0011 #include "pycore_freelist.h"   // _PyFreeListState
0012 
/* GC information is stored BEFORE the object structure.
 *
 * Every GC-tracked object is allocated with this header immediately
 * preceding the PyObject itself; _Py_AS_GC/_Py_FROM_GC convert between
 * the two addresses by adding/subtracting sizeof(PyGC_Head). */
typedef struct {
    // Pointer to next object in the list.
    // 0 means the object is not tracked
    uintptr_t _gc_next;

    // Pointer to previous object in the list.
    // Lowest two bits are used for flags documented later.
    // (see _PyGC_PREV_MASK_* below)
    uintptr_t _gc_prev;
} PyGC_Head;
0023 
0024 #define _PyGC_Head_UNUSED PyGC_Head
0025 
0026 
0027 /* Get an object's GC head */
0028 static inline PyGC_Head* _Py_AS_GC(PyObject *op) {
0029     char *gc = ((char*)op) - sizeof(PyGC_Head);
0030     return (PyGC_Head*)gc;
0031 }
0032 
0033 /* Get the object given the GC head */
0034 static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) {
0035     char *op = ((char *)gc) + sizeof(PyGC_Head);
0036     return (PyObject *)op;
0037 }
0038 
0039 
0040 /* Bit flags for ob_gc_bits (in Py_GIL_DISABLED builds)
0041  *
0042  * Setting the bits requires a relaxed store. The per-object lock must also be
0043  * held, except when the object is only visible to a single thread (e.g. during
0044  * object initialization or destruction).
0045  *
0046  * Reading the bits requires using a relaxed load, but does not require holding
0047  * the per-object lock.
0048  */
0049 #ifdef Py_GIL_DISABLED
0050 #  define _PyGC_BITS_TRACKED        (1)     // Tracked by the GC
0051 #  define _PyGC_BITS_FINALIZED      (2)     // tp_finalize was called
0052 #  define _PyGC_BITS_UNREACHABLE    (4)
0053 #  define _PyGC_BITS_FROZEN         (8)
0054 #  define _PyGC_BITS_SHARED         (16)
0055 #  define _PyGC_BITS_SHARED_INLINE  (32)
0056 #  define _PyGC_BITS_DEFERRED       (64)    // Use deferred reference counting
0057 #endif
0058 
0059 #ifdef Py_GIL_DISABLED
0060 
0061 static inline void
0062 _PyObject_SET_GC_BITS(PyObject *op, uint8_t new_bits)
0063 {
0064     uint8_t bits = _Py_atomic_load_uint8_relaxed(&op->ob_gc_bits);
0065     _Py_atomic_store_uint8_relaxed(&op->ob_gc_bits, bits | new_bits);
0066 }
0067 
0068 static inline int
0069 _PyObject_HAS_GC_BITS(PyObject *op, uint8_t bits)
0070 {
0071     return (_Py_atomic_load_uint8_relaxed(&op->ob_gc_bits) & bits) != 0;
0072 }
0073 
0074 static inline void
0075 _PyObject_CLEAR_GC_BITS(PyObject *op, uint8_t bits_to_clear)
0076 {
0077     uint8_t bits = _Py_atomic_load_uint8_relaxed(&op->ob_gc_bits);
0078     _Py_atomic_store_uint8_relaxed(&op->ob_gc_bits, bits & ~bits_to_clear);
0079 }
0080 
0081 #endif
0082 
/* True if the object is currently tracked by the GC.
 *
 * Free-threaded builds keep tracking state in the per-object ob_gc_bits
 * byte; default builds encode it as a non-zero _gc_next in the PyGC_Head
 * that precedes the object (see PyGC_Head above). */
static inline int _PyObject_GC_IS_TRACKED(PyObject *op) {
#ifdef Py_GIL_DISABLED
    return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_TRACKED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    return (gc->_gc_next != 0);
#endif
}
// Macro wrapper of the same name: lets callers pass any pointer type by
// casting to PyObject* before invoking the inline function above.
#define _PyObject_GC_IS_TRACKED(op) _PyObject_GC_IS_TRACKED(_Py_CAST(PyObject*, op))
0093 
0094 /* True if the object may be tracked by the GC in the future, or already is.
0095    This can be useful to implement some optimizations. */
0096 static inline int _PyObject_GC_MAY_BE_TRACKED(PyObject *obj) {
0097     if (!PyObject_IS_GC(obj)) {
0098         return 0;
0099     }
0100     if (PyTuple_CheckExact(obj)) {
0101         return _PyObject_GC_IS_TRACKED(obj);
0102     }
0103     return 1;
0104 }
0105 
0106 #ifdef Py_GIL_DISABLED
0107 
/* True if memory the object references is shared between
 * multiple threads and needs special purpose when freeing
 * those references due to the possibility of in-flight
 * lock-free reads occurring.  The object is responsible
 * for calling _PyMem_FreeDelayed on the referenced
 * memory. */
static inline int _PyObject_GC_IS_SHARED(PyObject *op) {
    return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_SHARED);
}
// Macro wrapper of the same name: accepts any pointer type via a cast.
#define _PyObject_GC_IS_SHARED(op) _PyObject_GC_IS_SHARED(_Py_CAST(PyObject*, op))
0118 
/* Mark the object's referenced memory as shared between threads
 * (sets _PyGC_BITS_SHARED; see _PyObject_GC_IS_SHARED above). */
static inline void _PyObject_GC_SET_SHARED(PyObject *op) {
    _PyObject_SET_GC_BITS(op, _PyGC_BITS_SHARED);
}
// Macro wrapper of the same name: accepts any pointer type via a cast.
#define _PyObject_GC_SET_SHARED(op) _PyObject_GC_SET_SHARED(_Py_CAST(PyObject*, op))
0123 
/* True if the memory of the object is shared between multiple
 * threads and needs special purpose when freeing due to
 * the possibility of in-flight lock-free reads occurring.
 * Objects with this bit that are GC objects will automatically
 * delay-freed by PyObject_GC_Del. */
static inline int _PyObject_GC_IS_SHARED_INLINE(PyObject *op) {
    return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_SHARED_INLINE);
}
// Macro wrapper of the same name: accepts any pointer type via a cast.
#define _PyObject_GC_IS_SHARED_INLINE(op) \
    _PyObject_GC_IS_SHARED_INLINE(_Py_CAST(PyObject*, op))
0134 
/* Mark the object's own memory as shared between threads
 * (sets _PyGC_BITS_SHARED_INLINE; see _PyObject_GC_IS_SHARED_INLINE). */
static inline void _PyObject_GC_SET_SHARED_INLINE(PyObject *op) {
    _PyObject_SET_GC_BITS(op, _PyGC_BITS_SHARED_INLINE);
}
// Macro wrapper of the same name: accepts any pointer type via a cast.
#define _PyObject_GC_SET_SHARED_INLINE(op) \
    _PyObject_GC_SET_SHARED_INLINE(_Py_CAST(PyObject*, op))
0140 
0141 #endif
0142 
/* Bit flags for _gc_prev */
/* Bit 0 is set when tp_finalize is called */
#define _PyGC_PREV_MASK_FINALIZED  (1)
/* Bit 1 is set when the object is in generation which is GCed currently. */
#define _PyGC_PREV_MASK_COLLECTING (2)
/* The (N-2) most significant bits contain the real address. */
#define _PyGC_PREV_SHIFT           (2)
#define _PyGC_PREV_MASK            (((uintptr_t) -1) << _PyGC_PREV_SHIFT)

/* set for debugging information */
#define _PyGC_DEBUG_STATS             (1<<0) /* print collection statistics */
#define _PyGC_DEBUG_COLLECTABLE       (1<<1) /* print collectable objects */
#define _PyGC_DEBUG_UNCOLLECTABLE     (1<<2) /* print uncollectable objects */
#define _PyGC_DEBUG_SAVEALL           (1<<5) /* save all garbage in gc.garbage */
/* Convenience combination of the three flags above.
 * The expansion is parenthesized: without the parentheses, an expression
 * like `debug & _PyGC_DEBUG_LEAK` would parse as
 * `(debug & COLLECTABLE) | UNCOLLECTABLE | SAVEALL` because `&` binds
 * tighter than `|` in C. */
#define _PyGC_DEBUG_LEAK              (_PyGC_DEBUG_COLLECTABLE | \
                                       _PyGC_DEBUG_UNCOLLECTABLE | \
                                       _PyGC_DEBUG_SAVEALL)
0160 
/* Why a garbage collection run was started (passed to _PyGC_Collect). */
typedef enum {
    // GC was triggered by heap allocation
    _Py_GC_REASON_HEAP,

    // GC was called during shutdown
    _Py_GC_REASON_SHUTDOWN,

    // GC was called by gc.collect() or PyGC_Collect()
    _Py_GC_REASON_MANUAL
} _PyGC_Reason;
0171 
0172 // Lowest bit of _gc_next is used for flags only in GC.
0173 // But it is always 0 for normal code.
0174 static inline PyGC_Head* _PyGCHead_NEXT(PyGC_Head *gc) {
0175     uintptr_t next = gc->_gc_next;
0176     return (PyGC_Head*)next;
0177 }
0178 static inline void _PyGCHead_SET_NEXT(PyGC_Head *gc, PyGC_Head *next) {
0179     gc->_gc_next = (uintptr_t)next;
0180 }
0181 
0182 // Lowest two bits of _gc_prev is used for _PyGC_PREV_MASK_* flags.
0183 static inline PyGC_Head* _PyGCHead_PREV(PyGC_Head *gc) {
0184     uintptr_t prev = (gc->_gc_prev & _PyGC_PREV_MASK);
0185     return (PyGC_Head*)prev;
0186 }
0187 static inline void _PyGCHead_SET_PREV(PyGC_Head *gc, PyGC_Head *prev) {
0188     uintptr_t uprev = (uintptr_t)prev;
0189     assert((uprev & ~_PyGC_PREV_MASK) == 0);
0190     gc->_gc_prev = ((gc->_gc_prev & ~_PyGC_PREV_MASK) | uprev);
0191 }
0192 
/* True if tp_finalize has already been called on the object.
 * Free-threaded builds read the bit from ob_gc_bits; default builds read
 * flag bit 0 of _gc_prev in the preceding PyGC_Head. */
static inline int _PyGC_FINALIZED(PyObject *op) {
#ifdef Py_GIL_DISABLED
    return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_FINALIZED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    return ((gc->_gc_prev & _PyGC_PREV_MASK_FINALIZED) != 0);
#endif
}
/* Record that tp_finalize has been called on the object
 * (so it will not be finalized a second time). */
static inline void _PyGC_SET_FINALIZED(PyObject *op) {
#ifdef Py_GIL_DISABLED
    _PyObject_SET_GC_BITS(op, _PyGC_BITS_FINALIZED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    gc->_gc_prev |= _PyGC_PREV_MASK_FINALIZED;
#endif
}
/* Clear the "tp_finalize was called" marker on the object. */
static inline void _PyGC_CLEAR_FINALIZED(PyObject *op) {
#ifdef Py_GIL_DISABLED
    _PyObject_CLEAR_GC_BITS(op, _PyGC_BITS_FINALIZED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    gc->_gc_prev &= ~_PyGC_PREV_MASK_FINALIZED;
#endif
}
0217 
0218 
0219 /* GC runtime state */
0220 
0221 /* If we change this, we need to change the default value in the
0222    signature of gc.collect. */
0223 #define NUM_GENERATIONS 3
0224 /*
0225    NOTE: about untracking of mutable objects.
0226 
0227    Certain types of container cannot participate in a reference cycle, and
0228    so do not need to be tracked by the garbage collector. Untracking these
0229    objects reduces the cost of garbage collections. However, determining
0230    which objects may be untracked is not free, and the costs must be
0231    weighed against the benefits for garbage collection.
0232 
0233    There are two possible strategies for when to untrack a container:
0234 
0235    i) When the container is created.
0236    ii) When the container is examined by the garbage collector.
0237 
0238    Tuples containing only immutable objects (integers, strings etc, and
0239    recursively, tuples of immutable objects) do not need to be tracked.
0240    The interpreter creates a large number of tuples, many of which will
0241    not survive until garbage collection. It is therefore not worthwhile
0242    to untrack eligible tuples at creation time.
0243 
0244    Instead, all tuples except the empty tuple are tracked when created.
0245    During garbage collection it is determined whether any surviving tuples
0246    can be untracked. A tuple can be untracked if all of its contents are
0247    already not tracked. Tuples are examined for untracking in all garbage
0248    collection cycles. It may take more than one cycle to untrack a tuple.
0249 
0250    Dictionaries containing only immutable objects also do not need to be
0251    tracked. Dictionaries are untracked when created. If a tracked item is
0252    inserted into a dictionary (either as a key or value), the dictionary
0253    becomes tracked. During a full garbage collection (all generations),
0254    the collector will untrack any dictionaries whose contents are not
0255    tracked.
0256 
0257    The module provides the python function is_tracked(obj), which returns
0258    the CURRENT tracking status of the object. Subsequent garbage
0259    collections may change the tracking status of the object.
0260 
0261    Untracking of certain containers was introduced in issue #4688, and
0262    the algorithm was refined in response to issue #14775.
0263 */
0264 
/* One GC generation: the list of objects it holds plus the counters
 * that decide when the generation is collected. */
struct gc_generation {
    PyGC_Head head;  /* list head for the objects in this generation */
    int threshold; /* collection threshold */
    int count; /* count of allocations or collections of younger
                  generations */
};
0271 
/* Running stats per generation (cumulative over the interpreter's life;
 * exposed via gc.get_stats()). */
struct gc_generation_stats {
    /* total number of collections */
    Py_ssize_t collections;
    /* total number of collected objects */
    Py_ssize_t collected;
    /* total number of uncollectable objects (put into gc.garbage) */
    Py_ssize_t uncollectable;
};
0281 
/* Per-interpreter GC state: trash-can machinery, generation lists,
 * statistics, and collection bookkeeping. */
struct _gc_runtime_state {
    /* List of objects that still need to be cleaned up, singly linked
     * via their gc headers' gc_prev pointers.  */
    PyObject *trash_delete_later;
    /* Current call-stack depth of tp_dealloc calls. */
    int trash_delete_nesting;

    /* Is automatic collection enabled? */
    int enabled;
    /* Bitmask of _PyGC_DEBUG_* flags (see above). */
    int debug;
    /* linked lists of container objects */
    struct gc_generation generations[NUM_GENERATIONS];
    /* Shortcut to &generations[0].head (the youngest generation). */
    PyGC_Head *generation0;
    /* a permanent generation which won't be collected */
    struct gc_generation permanent_generation;
    struct gc_generation_stats generation_stats[NUM_GENERATIONS];
    /* true if we are currently running the collector */
    int collecting;
    /* list of uncollectable objects */
    PyObject *garbage;
    /* a list of callbacks to be invoked when collection is performed */
    PyObject *callbacks;

    /* This is the number of objects that survived the last full
       collection. It approximates the number of long lived objects
       tracked by the GC.

       (by "full collection", we mean a collection of the oldest
       generation). */
    Py_ssize_t long_lived_total;
    /* This is the number of objects that survived all "non-full"
       collections, and are awaiting to undergo a full collection for
       the first time. */
    Py_ssize_t long_lived_pending;

#ifdef Py_GIL_DISABLED
    /* gh-117783: Deferred reference counting is not fully implemented yet, so
       as a temporary measure we treat objects using deferred reference
       counting as immortal. The value may be zero, one, or a negative number:
        0: immortalize deferred RC objects once the first thread is created
        1: immortalize all deferred RC objects immediately
        <0: suppressed; don't immortalize objects */
    int immortalize;
#endif
};
0327 
#ifdef Py_GIL_DISABLED
/* Per-thread GC state for free-threaded builds. */
struct _gc_thread_state {
    /* Thread-local allocation count. */
    Py_ssize_t alloc_count;
};
#endif
0334 
0335 
/* Initialize a _gc_runtime_state structure to its default values. */
extern void _PyGC_InitState(struct _gc_runtime_state *);

/* Run a collection of `generation` (and younger); returns the number of
 * collected + uncollectable objects. */
extern Py_ssize_t _PyGC_Collect(PyThreadState *tstate, int generation,
                                _PyGC_Reason reason);
/* Collect without raising on failure (used during shutdown). */
extern void _PyGC_CollectNoFail(PyThreadState *tstate);

/* Freeze objects tracked by the GC and ignore them in future collections. */
extern void _PyGC_Freeze(PyInterpreterState *interp);
/* Unfreezes objects placing them in the oldest generation */
extern void _PyGC_Unfreeze(PyInterpreterState *interp);
/* Number of frozen objects */
extern Py_ssize_t _PyGC_GetFreezeCount(PyInterpreterState *interp);

/* Return a list of objects in the given generation (all, if negative). */
extern PyObject *_PyGC_GetObjects(PyInterpreterState *interp, int generation);
/* Return a list of objects that directly refer to any of `objs`. */
extern PyObject *_PyGC_GetReferrers(PyInterpreterState *interp, PyObject *objs);

// Functions to clear types free lists
extern void _PyGC_ClearAllFreeLists(PyInterpreterState *interp);
/* Request that a GC run happen soon on this thread's interpreter. */
extern void _Py_ScheduleGC(PyThreadState *tstate);
/* Run a previously scheduled GC pass. */
extern void _Py_RunGC(PyThreadState *tstate);

#ifdef Py_GIL_DISABLED
// gh-117783: Immortalize objects that use deferred reference counting
extern void _PyGC_ImmortalizeDeferredObjects(PyInterpreterState *interp);
#endif
0361 
0362 #ifdef __cplusplus
0363 }
0364 #endif
0365 #endif /* !Py_INTERNAL_GC_H */