Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2025-11-19 09:50:45

0001 #ifndef Py_INTERNAL_CRITICAL_SECTION_H
0002 #define Py_INTERNAL_CRITICAL_SECTION_H
0003 
0004 #ifndef Py_BUILD_CORE
0005 #  error "this header requires Py_BUILD_CORE define"
0006 #endif
0007 
0008 #include "pycore_lock.h"        // PyMutex
0009 #include "pycore_pystate.h"     // _PyThreadState_GET()
0010 #include <stdint.h>
0011 
0012 #ifdef __cplusplus
0013 extern "C" {
0014 #endif
0015 
// Tagged pointers to critical sections use the two least significant bits to
// mark if the pointed-to critical section is inactive and whether it is a
// PyCriticalSection2 object.
#define _Py_CRITICAL_SECTION_INACTIVE       0x1
#define _Py_CRITICAL_SECTION_TWO_MUTEXES    0x2
#define _Py_CRITICAL_SECTION_MASK           0x3

#ifdef Py_GIL_DISABLED
// The Py_BEGIN_* macros open a block (`{`) that is closed by the matching
// Py_END_* macro, so the critical-section object lives on the C stack for
// the duration of the section.
# define Py_BEGIN_CRITICAL_SECTION_MUT(mutex)                           \
    {                                                                   \
        PyCriticalSection _py_cs;                                       \
        _PyCriticalSection_BeginMutex(&_py_cs, mutex)

# define Py_BEGIN_CRITICAL_SECTION2_MUT(m1, m2)                         \
    {                                                                   \
        PyCriticalSection2 _py_cs2;                                     \
        _PyCriticalSection2_BeginMutex(&_py_cs2, m1, m2)

// Specialized version of critical section locking to safely use
// PySequence_Fast APIs without the GIL. For performance, the argument *to*
// PySequence_Fast() is provided to the macro, not the *result* of
// PySequence_Fast(), which would require an extra test to determine if the
// lock must be acquired.
// Only exact list objects are locked; other sequence types skip the lock.
# define Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(original)              \
    {                                                                   \
        PyObject *_orig_seq = _PyObject_CAST(original);                 \
        const bool _should_lock_cs = PyList_CheckExact(_orig_seq);      \
        PyCriticalSection _cs;                                          \
        if (_should_lock_cs) {                                          \
            _PyCriticalSection_Begin(&_cs, _orig_seq);                  \
        }

# define Py_END_CRITICAL_SECTION_SEQUENCE_FAST()                        \
        if (_should_lock_cs) {                                          \
            PyCriticalSection_End(&_cs);                                \
        }                                                               \
    }

// Asserts that the mutex is locked.  The mutex must be held by the
// top-most critical section, otherwise there's the possibility
// that the mutex would be swapped out in some code paths.
#define _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(mutex) \
    _PyCriticalSection_AssertHeld(mutex)

// Asserts that the mutex for the given object is locked. The mutex must
// be held by the top-most critical section, otherwise there's the
// possibility that the mutex would be swapped out in some code paths.
#ifdef Py_DEBUG

// NOTE(review): a refcount of exactly 1 is treated as "object is private to
// this thread", so the per-object lock is not required to be held — confirm
// against the free-threading ownership rules.
# define _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op)                           \
    if (Py_REFCNT(op) != 1) {                                                    \
        _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&_PyObject_CAST(op)->ob_mutex); \
    }

#else   /* Py_DEBUG */

# define _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op)

#endif  /* Py_DEBUG */

#else  /* !Py_GIL_DISABLED */
// The critical section APIs are no-ops with the GIL.
# define Py_BEGIN_CRITICAL_SECTION_MUT(mut) {
# define Py_BEGIN_CRITICAL_SECTION2_MUT(m1, m2) {
# define Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(original) {
# define Py_END_CRITICAL_SECTION_SEQUENCE_FAST() }
# define _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(mutex)
# define _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op)
#endif  /* !Py_GIL_DISABLED */
0085 
// Resumes the top-most critical section.
PyAPI_FUNC(void)
_PyCriticalSection_Resume(PyThreadState *tstate);

// (private) slow path for locking the mutex
PyAPI_FUNC(void)
_PyCriticalSection_BeginSlow(PyCriticalSection *c, PyMutex *m);

// (private) slow path for locking two mutexes. `is_m1_locked` tells the slow
// path whether m1 was already acquired by the fast path (see the callers in
// _PyCriticalSection2_BeginMutex).
PyAPI_FUNC(void)
_PyCriticalSection2_BeginSlow(PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2,
                             int is_m1_locked);

// NOTE(review): implemented elsewhere; presumably suspends all of this
// thread's active critical sections — confirm in the implementation.
PyAPI_FUNC(void)
_PyCriticalSection_SuspendAll(PyThreadState *tstate);
0100 
0101 #ifdef Py_GIL_DISABLED
0102 
0103 static inline int
0104 _PyCriticalSection_IsActive(uintptr_t tag)
0105 {
0106     return tag != 0 && (tag & _Py_CRITICAL_SECTION_INACTIVE) == 0;
0107 }
0108 
0109 static inline void
0110 _PyCriticalSection_BeginMutex(PyCriticalSection *c, PyMutex *m)
0111 {
0112     if (PyMutex_LockFast(&m->_bits)) {
0113         PyThreadState *tstate = _PyThreadState_GET();
0114         c->_cs_mutex = m;
0115         c->_cs_prev = tstate->critical_section;
0116         tstate->critical_section = (uintptr_t)c;
0117     }
0118     else {
0119         _PyCriticalSection_BeginSlow(c, m);
0120     }
0121 }
0122 
0123 static inline void
0124 _PyCriticalSection_Begin(PyCriticalSection *c, PyObject *op)
0125 {
0126     _PyCriticalSection_BeginMutex(c, &op->ob_mutex);
0127 }
0128 #define PyCriticalSection_Begin _PyCriticalSection_Begin
0129 
0130 // Removes the top-most critical section from the thread's stack of critical
0131 // sections. If the new top-most critical section is inactive, then it is
0132 // resumed.
0133 static inline void
0134 _PyCriticalSection_Pop(PyCriticalSection *c)
0135 {
0136     PyThreadState *tstate = _PyThreadState_GET();
0137     uintptr_t prev = c->_cs_prev;
0138     tstate->critical_section = prev;
0139 
0140     if ((prev & _Py_CRITICAL_SECTION_INACTIVE) != 0) {
0141         _PyCriticalSection_Resume(tstate);
0142     }
0143 }
0144 
0145 static inline void
0146 _PyCriticalSection_End(PyCriticalSection *c)
0147 {
0148     PyMutex_Unlock(c->_cs_mutex);
0149     _PyCriticalSection_Pop(c);
0150 }
0151 #define PyCriticalSection_End _PyCriticalSection_End
0152 
// Begins a critical section that holds two mutexes at once. The mutexes are
// always acquired in a canonical (address) order so that two threads locking
// the same pair cannot deadlock against each other.
static inline void
_PyCriticalSection2_BeginMutex(PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2)
{
    if (m1 == m2) {
        // If the two mutex arguments are the same, treat this as a critical
        // section with a single mutex.
        c->_cs_mutex2 = NULL;
        _PyCriticalSection_BeginMutex(&c->_cs_base, m1);
        return;
    }

    if ((uintptr_t)m2 < (uintptr_t)m1) {
        // Sort the mutexes so that the lower address is locked first.
        // The exact order does not matter, but we need to acquire the mutexes
        // in a consistent order to avoid lock ordering deadlocks.
        PyMutex *tmp = m1;
        m1 = m2;
        m2 = tmp;
    }

    if (PyMutex_LockFast(&m1->_bits)) {
        if (PyMutex_LockFast(&m2->_bits)) {
            // Both fast paths succeeded: push this section onto the thread's
            // stack, tagged so that unwinding knows it holds two mutexes.
            PyThreadState *tstate = _PyThreadState_GET();
            c->_cs_base._cs_mutex = m1;
            c->_cs_mutex2 = m2;
            c->_cs_base._cs_prev = tstate->critical_section;

            uintptr_t p = (uintptr_t)c | _Py_CRITICAL_SECTION_TWO_MUTEXES;
            tstate->critical_section = p;
        }
        else {
            // m1 is already held; the slow path only needs to acquire m2.
            _PyCriticalSection2_BeginSlow(c, m1, m2, 1);
        }
    }
    else {
        // Neither mutex is held yet.
        _PyCriticalSection2_BeginSlow(c, m1, m2, 0);
    }
}
0191 
0192 static inline void
0193 _PyCriticalSection2_Begin(PyCriticalSection2 *c, PyObject *a, PyObject *b)
0194 {
0195     _PyCriticalSection2_BeginMutex(c, &a->ob_mutex, &b->ob_mutex);
0196 }
0197 #define PyCriticalSection2_Begin _PyCriticalSection2_Begin
0198 
0199 static inline void
0200 _PyCriticalSection2_End(PyCriticalSection2 *c)
0201 {
0202     if (c->_cs_mutex2) {
0203         PyMutex_Unlock(c->_cs_mutex2);
0204     }
0205     PyMutex_Unlock(c->_cs_base._cs_mutex);
0206     _PyCriticalSection_Pop(&c->_cs_base);
0207 }
0208 #define PyCriticalSection2_End _PyCriticalSection2_End
0209 
0210 static inline void
0211 _PyCriticalSection_AssertHeld(PyMutex *mutex)
0212 {
0213 #ifdef Py_DEBUG
0214     PyThreadState *tstate = _PyThreadState_GET();
0215     uintptr_t prev = tstate->critical_section;
0216     if (prev & _Py_CRITICAL_SECTION_TWO_MUTEXES) {
0217         PyCriticalSection2 *cs = (PyCriticalSection2 *)(prev & ~_Py_CRITICAL_SECTION_MASK);
0218         assert(cs != NULL && (cs->_cs_base._cs_mutex == mutex || cs->_cs_mutex2 == mutex));
0219     }
0220     else {
0221         PyCriticalSection *cs = (PyCriticalSection *)(tstate->critical_section & ~_Py_CRITICAL_SECTION_MASK);
0222         assert(cs != NULL && cs->_cs_mutex == mutex);
0223     }
0224 
0225 #endif
0226 }
0227 
0228 #endif /* Py_GIL_DISABLED */
0229 
0230 #ifdef __cplusplus
0231 }
0232 #endif
0233 #endif /* !Py_INTERNAL_CRITICAL_SECTION_H */