#ifndef Py_INTERNAL_CODE_H
#define Py_INTERNAL_CODE_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
#  error "this header requires Py_BUILD_CORE define"
#endif

#include "pycore_lock.h"        // PyMutex
#include "pycore_backoff.h"     // _Py_BackoffCounter


/* Each instruction in a code object is a fixed-width value,
 * currently 2 bytes: 1-byte opcode + 1-byte oparg.  The EXTENDED_ARG
 * opcode allows for larger values but the current limit is 3 uses
 * of EXTENDED_ARG (see Python/compile.c), for a maximum
 * 32-bit value.  This aligns with the note in Python/compile.c
 * (compiler_addop_i_line) indicating that the max oparg value is
 * 2**32 - 1, rather than INT_MAX.
 */

typedef union {
    uint16_t cache;
    struct {
        uint8_t code;
        uint8_t arg;
    } op;
    _Py_BackoffCounter counter;  // First cache entry of specializable op
} _Py_CODEUNIT;
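
/* Illustrative sketch (not part of this header; names and values are
 * hypothetical): reading a code unit, and how EXTENDED_ARG widens an
 * oparg by prepending more significant bytes:
 *
 *     _Py_CODEUNIT *instr = ...;       // some instruction pointer
 *     uint8_t opcode = instr->op.code;
 *     uint8_t arg = instr->op.arg;
 *
 *     // Three EXTENDED_ARG prefixes plus the instruction itself give a
 *     // 32-bit oparg, e.g. for the arg bytes 0x12, 0x34, 0x56, 0x78:
 *     //   oparg = (((0x12 << 8 | 0x34) << 8 | 0x56) << 8) | 0x78
 */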

#define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive)
#define _PyCode_NBYTES(CO) (Py_SIZE(CO) * (Py_ssize_t)sizeof(_Py_CODEUNIT))


/* These macros only remain defined for compatibility. */
#define _Py_OPCODE(word) ((word).op.code)
#define _Py_OPARG(word) ((word).op.arg)

static inline _Py_CODEUNIT
_py_make_codeunit(uint8_t opcode, uint8_t oparg)
{
    // No designated initialisers because of C++ compat
    _Py_CODEUNIT word;
    word.op.code = opcode;
    word.op.arg = oparg;
    return word;
}

static inline void
_py_set_opcode(_Py_CODEUNIT *word, uint8_t opcode)
{
    word->op.code = opcode;
}

#define _Py_MAKE_CODEUNIT(opcode, oparg) _py_make_codeunit((opcode), (oparg))
#define _Py_SET_OPCODE(word, opcode) _py_set_opcode(&(word), (opcode))


// We hide some of the newer PyCodeObject fields behind macros.
// This helps with backporting certain changes to 3.12.
#define _PyCode_HAS_EXECUTORS(CODE) \
    ((CODE)->co_executors != NULL)
#define _PyCode_HAS_INSTRUMENTATION(CODE) \
    ((CODE)->_co_instrumentation_version > 0)

struct _py_code_state {
    PyMutex mutex;
    // Interned constants from code objects. Used by the free-threaded build.
    struct _Py_hashtable_t *constants;
};

extern PyStatus _PyCode_Init(PyInterpreterState *interp);
extern void _PyCode_Fini(PyInterpreterState *interp);

#define CODE_MAX_WATCHERS 8

/* PEP 659
 * Specialization and quickening structs and helper functions
 */


// Inline caches. If you change the number of cache entries for an instruction,
// you must *also* update the number of cache entries in Lib/opcode.py and bump
// the magic number in Lib/importlib/_bootstrap_external.py!

#define CACHE_ENTRIES(cache) (sizeof(cache)/sizeof(_Py_CODEUNIT))

typedef struct {
    _Py_BackoffCounter counter;
    uint16_t module_keys_version;
    uint16_t builtin_keys_version;
    uint16_t index;
} _PyLoadGlobalCache;

#define INLINE_CACHE_ENTRIES_LOAD_GLOBAL CACHE_ENTRIES(_PyLoadGlobalCache)
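
// Layout sketch: each cache struct is a whole number of 16-bit code units,
// and its entries sit directly after the instruction in the bytecode array.
// For LOAD_GLOBAL (four 16-bit fields):
//
//     code[i]   LOAD_GLOBAL opcode/oparg
//     code[i+1] counter
//     code[i+2] module_keys_version
//     code[i+3] builtin_keys_version
//     code[i+4] index
//
// so INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4 and the next real instruction
// starts at code[i+5].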

typedef struct {
    _Py_BackoffCounter counter;
} _PyBinaryOpCache;

#define INLINE_CACHE_ENTRIES_BINARY_OP CACHE_ENTRIES(_PyBinaryOpCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyUnpackSequenceCache;

#define INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE \
    CACHE_ENTRIES(_PyUnpackSequenceCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyCompareOpCache;

#define INLINE_CACHE_ENTRIES_COMPARE_OP CACHE_ENTRIES(_PyCompareOpCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyBinarySubscrCache;

#define INLINE_CACHE_ENTRIES_BINARY_SUBSCR CACHE_ENTRIES(_PyBinarySubscrCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PySuperAttrCache;

#define INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR CACHE_ENTRIES(_PySuperAttrCache)

typedef struct {
    _Py_BackoffCounter counter;
    uint16_t version[2];
    uint16_t index;
} _PyAttrCache;

typedef struct {
    _Py_BackoffCounter counter;
    uint16_t type_version[2];
    union {
        uint16_t keys_version[2];
        uint16_t dict_offset;
    };
    uint16_t descr[4];
} _PyLoadMethodCache;


// MUST be the larger of CACHE_ENTRIES(_PyAttrCache) and
// CACHE_ENTRIES(_PyLoadMethodCache).
#define INLINE_CACHE_ENTRIES_LOAD_ATTR CACHE_ENTRIES(_PyLoadMethodCache)

#define INLINE_CACHE_ENTRIES_STORE_ATTR CACHE_ENTRIES(_PyAttrCache)

typedef struct {
    _Py_BackoffCounter counter;
    uint16_t func_version[2];
} _PyCallCache;

#define INLINE_CACHE_ENTRIES_CALL CACHE_ENTRIES(_PyCallCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyStoreSubscrCache;

#define INLINE_CACHE_ENTRIES_STORE_SUBSCR CACHE_ENTRIES(_PyStoreSubscrCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyForIterCache;

#define INLINE_CACHE_ENTRIES_FOR_ITER CACHE_ENTRIES(_PyForIterCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PySendCache;

#define INLINE_CACHE_ENTRIES_SEND CACHE_ENTRIES(_PySendCache)

typedef struct {
    _Py_BackoffCounter counter;
    uint16_t version[2];
} _PyToBoolCache;

#define INLINE_CACHE_ENTRIES_TO_BOOL CACHE_ENTRIES(_PyToBoolCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyContainsOpCache;

#define INLINE_CACHE_ENTRIES_CONTAINS_OP CACHE_ENTRIES(_PyContainsOpCache)

// Borrowed references to common callables:
struct callable_cache {
    PyObject *isinstance;
    PyObject *len;
    PyObject *list_append;
    PyObject *object__getattribute__;
};

/* "Locals plus" for a code object is the set of locals + cell vars +
 * free vars.  This relates to variable names as well as offsets into
 * the "fast locals" storage array of execution frames.  The compiler
 * builds the list of names, their offsets, and the corresponding
 * kind of local.
 *
 * Those kinds represent the source of the initial value and the
 * variable's scope (as related to closures).  A "local" is an
 * argument or other variable defined in the current scope.  A "free"
 * variable is one that is defined in an outer scope and comes from
 * the function's closure.  A "cell" variable is a local that escapes
 * into an inner function as part of a closure, and thus must be
 * wrapped in a cell.  Any "local" can also be a "cell", but the
 * "free" kind is mutually exclusive with both.
 */

// Note that these all fit within a byte, as do combinations.
// Later, we will use the smaller numbers to differentiate the different
// kinds of locals (e.g. pos-only arg, varkwargs, local-only).
#define CO_FAST_HIDDEN  0x10
#define CO_FAST_LOCAL   0x20
#define CO_FAST_CELL    0x40
#define CO_FAST_FREE    0x80
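
// Example (sketch): for the Python source
//
//     def f(a):
//         x = 1
//         def g():
//             return a
//
// `a` is CO_FAST_LOCAL | CO_FAST_CELL in f (an argument that escapes into
// g's closure), `x` is plain CO_FAST_LOCAL, and `a` is CO_FAST_FREE in g.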

typedef unsigned char _PyLocals_Kind;

static inline _PyLocals_Kind
_PyLocals_GetKind(PyObject *kinds, int i)
{
    assert(PyBytes_Check(kinds));
    assert(0 <= i && i < PyBytes_GET_SIZE(kinds));
    char *ptr = PyBytes_AS_STRING(kinds);
    return (_PyLocals_Kind)(ptr[i]);
}

static inline void
_PyLocals_SetKind(PyObject *kinds, int i, _PyLocals_Kind kind)
{
    assert(PyBytes_Check(kinds));
    assert(0 <= i && i < PyBytes_GET_SIZE(kinds));
    char *ptr = PyBytes_AS_STRING(kinds);
    ptr[i] = (char) kind;
}


struct _PyCodeConstructor {
    /* metadata */
    PyObject *filename;
    PyObject *name;
    PyObject *qualname;
    int flags;

    /* the code */
    PyObject *code;
    int firstlineno;
    PyObject *linetable;

    /* used by the code */
    PyObject *consts;
    PyObject *names;

    /* mapping frame offsets to information */
    PyObject *localsplusnames;  // Tuple of strings
    PyObject *localspluskinds;  // Bytes object, one byte per variable

    /* args (within varnames) */
    int argcount;
    int posonlyargcount;
    // XXX Replace argcount with posorkwargcount (argcount - posonlyargcount).
    int kwonlyargcount;

    /* needed to create the frame */
    int stacksize;

    /* used by the eval loop */
    PyObject *exceptiontable;
};

// Using an "arguments struct" like this is helpful for maintainability
// in a case such as this with many parameters.  It does bear a risk:
// if the struct changes and callers are not updated properly then the
// compiler will not catch problems (like a missing argument).  This can
// cause hard-to-debug problems.  The risk is mitigated by the use of
// check_code() in codeobject.c.  However, we may decide to switch
// back to a regular function signature.  Regardless, this approach
// wouldn't be appropriate if this weren't a strictly internal API.
// (See the comments in https://github.com/python/cpython/pull/26258.)
extern int _PyCode_Validate(struct _PyCodeConstructor *);
extern PyCodeObject* _PyCode_New(struct _PyCodeConstructor *);
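
// Illustrative call sequence (a sketch, not a prescribed pattern; the
// field values here are placeholders):
//
//     struct _PyCodeConstructor con = {
//         .filename = filename,
//         .name = name,
//         .qualname = qualname,
//         .code = code,
//         .consts = consts,
//         // ... remaining fields ...
//     };
//     if (_PyCode_Validate(&con) < 0) {
//         return NULL;
//     }
//     PyCodeObject *co = _PyCode_New(&con);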


/* Private API */

/* Getters for internal PyCodeObject data. */
extern PyObject* _PyCode_GetVarnames(PyCodeObject *);
extern PyObject* _PyCode_GetCellvars(PyCodeObject *);
extern PyObject* _PyCode_GetFreevars(PyCodeObject *);
extern PyObject* _PyCode_GetCode(PyCodeObject *);

/** API for initializing the line number tables. */
extern int _PyCode_InitAddressRange(PyCodeObject* co, PyCodeAddressRange *bounds);

/** Out of process API for initializing the location table. */
extern void _PyLineTable_InitAddressRange(
    const char *linetable,
    Py_ssize_t length,
    int firstlineno,
    PyCodeAddressRange *range);

/** API for traversing the line number table. */
extern int _PyLineTable_NextAddressRange(PyCodeAddressRange *range);
extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range);

/** API for executors */
extern void _PyCode_Clear_Executors(PyCodeObject *code);

#ifdef Py_GIL_DISABLED
// gh-115999 tracks progress on addressing this.
#define ENABLE_SPECIALIZATION 0
#else
#define ENABLE_SPECIALIZATION 1
#endif

/* Specialization functions */

extern void _Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls,
                                         _Py_CODEUNIT *instr, int load_method);
extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr,
                                    PyObject *name);
extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr,
                                     PyObject *name);
extern void _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins,
                                      _Py_CODEUNIT *instr, PyObject *name);
extern void _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container,
                                        _Py_CODEUNIT *instr);
extern void _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub,
                                       _Py_CODEUNIT *instr);
extern void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr,
                                int nargs);
extern void _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
                                    int oparg, PyObject **locals);
extern void _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs,
                                     _Py_CODEUNIT *instr, int oparg);
extern void _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr,
                                          int oparg);
extern void _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg);
extern void _Py_Specialize_Send(PyObject *receiver, _Py_CODEUNIT *instr);
extern void _Py_Specialize_ToBool(PyObject *value, _Py_CODEUNIT *instr);
extern void _Py_Specialize_ContainsOp(PyObject *value, _Py_CODEUNIT *instr);

#ifdef Py_STATS

#include "pycore_bitutils.h"  // _Py_bit_length

#define STAT_INC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name++; } while (0)
#define STAT_DEC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name--; } while (0)
#define OPCODE_EXE_INC(opname) do { if (_Py_stats) _Py_stats->opcode_stats[opname].execution_count++; } while (0)
#define CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.name++; } while (0)
#define OBJECT_STAT_INC(name) do { if (_Py_stats) _Py_stats->object_stats.name++; } while (0)
#define OBJECT_STAT_INC_COND(name, cond) \
    do { if (_Py_stats && cond) _Py_stats->object_stats.name++; } while (0)
#define EVAL_CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.eval_calls[name]++; } while (0)
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) \
    do { if (_Py_stats && PyFunction_Check(callable)) _Py_stats->call_stats.eval_calls[name]++; } while (0)
#define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0)
#define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0)
#define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0)
#define UOP_PAIR_INC(uopcode, lastuop)                                              \
    do {                                                                            \
        if (lastuop && _Py_stats) {                                                 \
            _Py_stats->optimization_stats.opcode[lastuop].pair_count[uopcode]++;    \
        }                                                                           \
        lastuop = uopcode;                                                          \
    } while (0)
#define OPT_UNSUPPORTED_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.unsupported_opcode[opname]++; } while (0)
#define OPT_ERROR_IN_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.error_in_opcode[opname]++; } while (0)
#define OPT_HIST(length, name) \
    do { \
        if (_Py_stats) { \
            int bucket = _Py_bit_length(length >= 1 ? length - 1 : 0); \
            bucket = (bucket >= _Py_UOP_HIST_SIZE) ? _Py_UOP_HIST_SIZE - 1 : bucket; \
            _Py_stats->optimization_stats.name[bucket]++; \
        } \
    } while (0)
#define RARE_EVENT_STAT_INC(name) do { if (_Py_stats) _Py_stats->rare_event_stats.name++; } while (0)

// Export for '_opcode' shared extension
PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void);

#else
#define STAT_INC(opname, name) ((void)0)
#define STAT_DEC(opname, name) ((void)0)
#define OPCODE_EXE_INC(opname) ((void)0)
#define CALL_STAT_INC(name) ((void)0)
#define OBJECT_STAT_INC(name) ((void)0)
#define OBJECT_STAT_INC_COND(name, cond) ((void)0)
#define EVAL_CALL_STAT_INC(name) ((void)0)
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0)
#define GC_STAT_ADD(gen, name, n) ((void)0)
#define OPT_STAT_INC(name) ((void)0)
#define UOP_STAT_INC(opname, name) ((void)0)
#define UOP_PAIR_INC(uopcode, lastuop) ((void)0)
#define OPT_UNSUPPORTED_OPCODE(opname) ((void)0)
#define OPT_ERROR_IN_OPCODE(opname) ((void)0)
#define OPT_HIST(length, name) ((void)0)
#define RARE_EVENT_STAT_INC(name) ((void)0)
#endif  // !Py_STATS

// Utility functions for reading/writing 32/64-bit values in the inline caches.
// Great care should be taken to ensure that these functions remain correct and
// performant! They should compile to just "move" instructions on all supported
// compilers and platforms.

// We use memcpy to let the C compiler handle unaligned accesses and endianness
// issues for us. It also seems to produce better code than manual copying for
// most compilers (see https://blog.regehr.org/archives/959 for more info).

static inline void
write_u32(uint16_t *p, uint32_t val)
{
    memcpy(p, &val, sizeof(val));
}

static inline void
write_u64(uint16_t *p, uint64_t val)
{
    memcpy(p, &val, sizeof(val));
}

static inline void
write_obj(uint16_t *p, PyObject *val)
{
    memcpy(p, &val, sizeof(val));
}

static inline uint16_t
read_u16(uint16_t *p)
{
    return *p;
}

static inline uint32_t
read_u32(uint16_t *p)
{
    uint32_t val;
    memcpy(&val, p, sizeof(val));
    return val;
}

static inline uint64_t
read_u64(uint16_t *p)
{
    uint64_t val;
    memcpy(&val, p, sizeof(val));
    return val;
}

static inline PyObject *
read_obj(uint16_t *p)
{
    PyObject *val;
    memcpy(&val, p, sizeof(val));
    return val;
}
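
// Usage sketch (the cache pointer and version value are hypothetical): a
// 32-bit version tag spans two 16-bit cache entries, so it goes through
// memcpy rather than a possibly unaligned 32-bit store:
//
//     _PyAttrCache *cache = ...;
//     write_u32(cache->version, version_tag);
//     ...
//     uint32_t version = read_u32(cache->version);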

/* See Objects/exception_handling_notes.txt for details.
 */
static inline unsigned char *
parse_varint(unsigned char *p, int *result) {
    int val = p[0] & 63;
    while (p[0] & 64) {
        p++;
        val = (val << 6) | (p[0] & 63);
    }
    *result = val;
    return p+1;
}
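
/* Worked example (sketch): the two bytes {0x43, 0x08} decode as
 *     0x43 = 0b01000011 -> continuation bit (64) set, chunk = 3
 *     0x08 = 0b00001000 -> no continuation, chunk = 8
 * so *result = (3 << 6) | 8 = 200, and the returned pointer is p + 2.
 * Note the most significant chunk comes first here, unlike write_varint
 * below, which emits the least significant chunk first: the two functions
 * serve different tables and are not inverses of each other.
 */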

static inline int
write_varint(uint8_t *ptr, unsigned int val)
{
    int written = 1;
    while (val >= 64) {
        *ptr++ = 64 | (val & 63);
        val >>= 6;
        written++;
    }
    *ptr = (uint8_t)val;
    return written;
}

static inline int
write_signed_varint(uint8_t *ptr, int val)
{
    unsigned int uval;
    if (val < 0) {
        // (unsigned int)(-val) has undefined behavior for INT_MIN
        uval = ((0 - (unsigned int)val) << 1) | 1;
    }
    else {
        uval = (unsigned int)val << 1;
    }
    return write_varint(ptr, uval);
}
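
/* Worked example (sketch): write_varint emits the *least* significant
 * 6-bit chunk first, with bit 64 marking continuation:
 *
 *     write_varint(ptr, 200);          // emits {64 | 8, 3} = {72, 3}
 *
 * write_signed_varint zig-zag encodes the sign into the low bit:
 *
 *     write_signed_varint(ptr, 3);     // uval = 6
 *     write_signed_varint(ptr, -3);    // uval = 7
 */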

static inline int
write_location_entry_start(uint8_t *ptr, int code, int length)
{
    assert((code & 15) == code);
    *ptr = 128 | (uint8_t)(code << 3) | (uint8_t)(length - 1);
    return 1;
}
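
/* Bit layout sketch of the first byte of a location entry:
 *
 *     1 CCCC LLL
 *     ^ ^    ^-- length - 1  (entries cover 1..8 code units)
 *     | +------- code        (0..15)
 *     +--------- start-of-entry marker
 *
 * e.g. write_location_entry_start(ptr, 14, 3) stores
 * 128 | (14 << 3) | 2 == 0xF2.
 */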


/** Counters
 * The first 16-bit value in each inline cache is a counter.
 *
 * When counting executions until the next specialization attempt,
 * exponential backoff is used to reduce the number of specialization failures.
 * See pycore_backoff.h for more details.
 * On a specialization failure, the backoff counter is restarted.
 */

#include "pycore_backoff.h"

// A value of 1 means that we attempt to specialize the *second* time each
// instruction is executed. Executing twice is a much better indicator of
// "hotness" than executing once, but additional warmup delays only prevent
// specialization. Most types stabilize by the second execution, too:
#define ADAPTIVE_WARMUP_VALUE 1
#define ADAPTIVE_WARMUP_BACKOFF 1

// A value of 52 means that we attempt to re-specialize after 53 misses (a prime
// number, useful for avoiding artifacts if every nth value is a different type
// or something). Setting the backoff to 0 means that the counter is reset to
// the same state as a warming-up instruction (value == 1, backoff == 1) after
// deoptimization. This isn't strictly necessary, but it is a bit easier to reason
// about when thinking about the opcode transitions as a state machine:
#define ADAPTIVE_COOLDOWN_VALUE 52
#define ADAPTIVE_COOLDOWN_BACKOFF 0

// Can't assert this in pycore_backoff.h because of header order dependencies
#if COLD_EXIT_INITIAL_VALUE <= ADAPTIVE_COOLDOWN_VALUE
#  error  "Cold exit value should be larger than adaptive cooldown value"
#endif

static inline _Py_BackoffCounter
adaptive_counter_bits(uint16_t value, uint16_t backoff) {
    return make_backoff_counter(value, backoff);
}

static inline _Py_BackoffCounter
adaptive_counter_warmup(void) {
    return adaptive_counter_bits(ADAPTIVE_WARMUP_VALUE,
                                 ADAPTIVE_WARMUP_BACKOFF);
}

static inline _Py_BackoffCounter
adaptive_counter_cooldown(void) {
    return adaptive_counter_bits(ADAPTIVE_COOLDOWN_VALUE,
                                 ADAPTIVE_COOLDOWN_BACKOFF);
}

static inline _Py_BackoffCounter
adaptive_counter_backoff(_Py_BackoffCounter counter) {
    return restart_backoff_counter(counter);
}
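
/* Usage sketch (assuming the helpers declared in pycore_backoff.h, e.g.
 * backoff_counter_triggers() and advance_backoff_counter()):
 *
 *     _Py_BackoffCounter counter = instr[1].counter;
 *     if (backoff_counter_triggers(counter)) {
 *         // attempt (re-)specialization; on failure:
 *         instr[1].counter = adaptive_counter_backoff(counter);
 *     }
 *     else {
 *         instr[1].counter = advance_backoff_counter(counter);
 *     }
 */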


/* Comparison bit masks. */

/* Note this evaluates its arguments twice each */
#define COMPARISON_BIT(x, y) (1 << (2 * ((x) >= (y)) + ((x) <= (y))))

/*
 * The following bits are chosen so that the value of
 * COMPARISON_BIT(left, right)
 * masked by the values below will be non-zero if the
 * comparison is true, and zero if it is false */

/* This is for values that are unordered, i.e. NaN, not types that are unordered, e.g. sets */
#define COMPARISON_UNORDERED 1

#define COMPARISON_LESS_THAN 2
#define COMPARISON_GREATER_THAN 4
#define COMPARISON_EQUALS 8

#define COMPARISON_NOT_EQUALS (COMPARISON_UNORDERED | COMPARISON_LESS_THAN | COMPARISON_GREATER_THAN)
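
/* Worked example (sketch): for left < right, (x >= y) is 0 and (x <= y)
 * is 1, so COMPARISON_BIT(left, right) == 1 << 1 == COMPARISON_LESS_THAN,
 * and masking with COMPARISON_LESS_THAN | COMPARISON_EQUALS (i.e. "<=")
 * is non-zero.  For a NaN operand both tests are false, giving
 * 1 << 0 == COMPARISON_UNORDERED.
 */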

extern int _Py_Instrument(PyCodeObject *co, PyInterpreterState *interp);

extern int _Py_GetBaseOpcode(PyCodeObject *code, int offset);

extern int _PyInstruction_GetLength(PyCodeObject *code, int offset);

#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_CODE_H */