/* Internal API for code objects, inline caches, and the adaptive specializer. */
0001 #ifndef Py_INTERNAL_CODE_H
0002 #define Py_INTERNAL_CODE_H
0003 #ifdef __cplusplus
0004 extern "C" {
0005 #endif
0006
0007 #ifndef Py_BUILD_CORE
0008 # error "this header requires Py_BUILD_CORE define"
0009 #endif
0010
0011 #include "pycore_lock.h" // PyMutex
0012 #include "pycore_backoff.h" // _Py_BackoffCounter
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
/* One 16-bit unit of bytecode. The same two bytes can be viewed three ways:
   as a raw inline-cache entry, as an (opcode, oparg) instruction pair, or as
   an adaptive-specialization backoff counter (see pycore_backoff.h). */
typedef union {
    uint16_t cache;              // raw 16-bit view (inline cache entry)
    struct {
        uint8_t code;            // opcode
        uint8_t arg;             // oparg
    } op;
    _Py_BackoffCounter counter;  // backoff counter view (specializing interpreter)
} _Py_CODEUNIT;
0032
/* The adaptive bytecode of a code object, viewed as an array of code units. */
#define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive)
/* Size of the bytecode in bytes; Py_SIZE(CO) counts code units. */
#define _PyCode_NBYTES(CO) (Py_SIZE(CO) * (Py_ssize_t)sizeof(_Py_CODEUNIT))

/* Extract the opcode / oparg byte from a code unit (by value). */
#define _Py_OPCODE(word) ((word).op.code)
#define _Py_OPARG(word) ((word).op.arg)
0040
0041 static inline _Py_CODEUNIT
0042 _py_make_codeunit(uint8_t opcode, uint8_t oparg)
0043 {
0044
0045 _Py_CODEUNIT word;
0046 word.op.code = opcode;
0047 word.op.arg = oparg;
0048 return word;
0049 }
0050
0051 static inline void
0052 _py_set_opcode(_Py_CODEUNIT *word, uint8_t opcode)
0053 {
0054 word->op.code = opcode;
0055 }
0056
0057 #define _Py_MAKE_CODEUNIT(opcode, oparg) _py_make_codeunit((opcode), (oparg))
0058 #define _Py_SET_OPCODE(word, opcode) _py_set_opcode(&(word), (opcode))
0059
0060
0061
0062
/* Does this code object have any Tier-2 executors attached?
   The macro parameters are parenthesized so that any expression
   (e.g. a conditional) can be passed as CODE. */
#define _PyCode_HAS_EXECUTORS(CODE) \
    ((CODE)->co_executors != NULL)
/* Has this code object's instrumentation version ever advanced past 0? */
#define _PyCode_HAS_INSTRUMENTATION(CODE) \
    ((CODE)->_co_instrumentation_version > 0)
0067
/* Per-interpreter state for the code-object implementation. */
struct _py_code_state {
    PyMutex mutex;  // NOTE(review): presumably guards `constants` — confirm in codeobject.c
    // NOTE(review): presumably a table of shared/interned code-object
    // constants; confirm against _PyCode_Init/_PyCode_Fini.
    struct _Py_hashtable_t *constants;
};

/* Set up / tear down the per-interpreter code state above. */
extern PyStatus _PyCode_Init(PyInterpreterState *interp);
extern void _PyCode_Fini(PyInterpreterState *interp);

/* Maximum number of code-object watchers per interpreter. */
#define CODE_MAX_WATCHERS 8
0078
0079
0080
0081
0082
0083
0084
0085
0086
0087
/* Inline caches: a specializable instruction is followed in the bytecode by
   one of the cache structs below. CACHE_ENTRIES() converts a struct's size
   into the number of _Py_CODEUNIT slots it occupies; every cache struct
   must therefore be a multiple of sizeof(_Py_CODEUNIT) (2 bytes). */
#define CACHE_ENTRIES(cache) (sizeof(cache)/sizeof(_Py_CODEUNIT))

// LOAD_GLOBAL cache: dict-keys versions plus the cached index.
typedef struct {
    _Py_BackoffCounter counter;
    uint16_t module_keys_version;
    uint16_t builtin_keys_version;
    uint16_t index;
} _PyLoadGlobalCache;

#define INLINE_CACHE_ENTRIES_LOAD_GLOBAL CACHE_ENTRIES(_PyLoadGlobalCache)

// BINARY_OP cache: counter only.
typedef struct {
    _Py_BackoffCounter counter;
} _PyBinaryOpCache;

#define INLINE_CACHE_ENTRIES_BINARY_OP CACHE_ENTRIES(_PyBinaryOpCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyUnpackSequenceCache;

#define INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE \
    CACHE_ENTRIES(_PyUnpackSequenceCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyCompareOpCache;

#define INLINE_CACHE_ENTRIES_COMPARE_OP CACHE_ENTRIES(_PyCompareOpCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyBinarySubscrCache;

#define INLINE_CACHE_ENTRIES_BINARY_SUBSCR CACHE_ENTRIES(_PyBinarySubscrCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PySuperAttrCache;

#define INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR CACHE_ENTRIES(_PySuperAttrCache)

// Attribute cache (used for STORE_ATTR): a 32-bit version split across
// two 16-bit slots, plus the cached index.
typedef struct {
    _Py_BackoffCounter counter;
    uint16_t version[2];
    uint16_t index;
} _PyAttrCache;

// LOAD_ATTR / method-load cache. The anonymous union stores either a
// keys version or a dict offset — which one is live depends on the
// chosen specialization (NOTE(review): confirm against specialize.c).
typedef struct {
    _Py_BackoffCounter counter;
    uint16_t type_version[2];
    union {
        uint16_t keys_version[2];
        uint16_t dict_offset;
    };
    uint16_t descr[4];
} _PyLoadMethodCache;

// LOAD_ATTR uses the larger method-cache layout for all its variants.
#define INLINE_CACHE_ENTRIES_LOAD_ATTR CACHE_ENTRIES(_PyLoadMethodCache)

#define INLINE_CACHE_ENTRIES_STORE_ATTR CACHE_ENTRIES(_PyAttrCache)

// CALL cache: 32-bit function version split across two 16-bit slots.
typedef struct {
    _Py_BackoffCounter counter;
    uint16_t func_version[2];
} _PyCallCache;

#define INLINE_CACHE_ENTRIES_CALL CACHE_ENTRIES(_PyCallCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyStoreSubscrCache;

#define INLINE_CACHE_ENTRIES_STORE_SUBSCR CACHE_ENTRIES(_PyStoreSubscrCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyForIterCache;

#define INLINE_CACHE_ENTRIES_FOR_ITER CACHE_ENTRIES(_PyForIterCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PySendCache;

#define INLINE_CACHE_ENTRIES_SEND CACHE_ENTRIES(_PySendCache)

typedef struct {
    _Py_BackoffCounter counter;
    uint16_t version[2];
} _PyToBoolCache;

#define INLINE_CACHE_ENTRIES_TO_BOOL CACHE_ENTRIES(_PyToBoolCache)

typedef struct {
    _Py_BackoffCounter counter;
} _PyContainsOpCache;

#define INLINE_CACHE_ENTRIES_CONTAINS_OP CACHE_ENTRIES(_PyContainsOpCache)

/* Frequently-used builtin callables cached per interpreter, so they can
   be recognized cheaply (NOTE(review): presumably by pointer comparison
   in the specializer — confirm). */
struct callable_cache {
    PyObject *isinstance;
    PyObject *len;
    PyObject *list_append;
    PyObject *object__getattribute__;
};
0197
0198
0199
0200
0201
0202
0203
0204
0205
0206
0207
0208
0209
0210
0211
0212
0213
0214
0215
0216
/* Kind bits for the per-variable entries in co_localspluskinds. */
#define CO_FAST_HIDDEN 0x10
#define CO_FAST_LOCAL 0x20
#define CO_FAST_CELL 0x40
#define CO_FAST_FREE 0x80

/* One kind byte per "fast local" variable; stored in a bytes object. */
typedef unsigned char _PyLocals_Kind;
0223
0224 static inline _PyLocals_Kind
0225 _PyLocals_GetKind(PyObject *kinds, int i)
0226 {
0227 assert(PyBytes_Check(kinds));
0228 assert(0 <= i && i < PyBytes_GET_SIZE(kinds));
0229 char *ptr = PyBytes_AS_STRING(kinds);
0230 return (_PyLocals_Kind)(ptr[i]);
0231 }
0232
0233 static inline void
0234 _PyLocals_SetKind(PyObject *kinds, int i, _PyLocals_Kind kind)
0235 {
0236 assert(PyBytes_Check(kinds));
0237 assert(0 <= i && i < PyBytes_GET_SIZE(kinds));
0238 char *ptr = PyBytes_AS_STRING(kinds);
0239 ptr[i] = (char) kind;
0240 }
0241
0242
/* Bundle of all the inputs needed to build a code object; passed to
   _PyCode_Validate() and _PyCode_New() below. */
struct _PyCodeConstructor {
    /* metadata */
    PyObject *filename;
    PyObject *name;
    PyObject *qualname;
    int flags;

    /* the code */
    PyObject *code;
    int firstlineno;
    PyObject *linetable;

    /* objects used by the code */
    PyObject *consts;
    PyObject *names;

    /* fast locals: names and matching kind bytes (CO_FAST_*) */
    PyObject *localsplusnames;
    PyObject *localspluskinds;

    /* argument counts */
    int argcount;
    int posonlyargcount;
    int kwonlyargcount;

    /* needed to size the frame */
    int stacksize;

    /* exception handling table */
    PyObject *exceptiontable;
};
0275
0276
0277
0278
0279
0280
0281
0282
0283
0284
/* Check a constructor bundle for consistency, then build the code object. */
extern int _PyCode_Validate(struct _PyCodeConstructor *);
extern PyCodeObject* _PyCode_New(struct _PyCodeConstructor *);

/* Derived views of a code object (each returns a new reference). */
extern PyObject* _PyCode_GetVarnames(PyCodeObject *);
extern PyObject* _PyCode_GetCellvars(PyCodeObject *);
extern PyObject* _PyCode_GetFreevars(PyCodeObject *);
extern PyObject* _PyCode_GetCode(PyCodeObject *);

/* Initialize `bounds` to iterate the line table of `co`. */
extern int _PyCode_InitAddressRange(PyCodeObject* co, PyCodeAddressRange *bounds);

/* Initialize `range` from a raw line table buffer. */
extern void _PyLineTable_InitAddressRange(
    const char *linetable,
    Py_ssize_t length,
    int firstlineno,
    PyCodeAddressRange *range);

/* Step the address range forward / backward through the line table. */
extern int _PyLineTable_NextAddressRange(PyCodeAddressRange *range);
extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range);

/* Detach and free all Tier-2 executors attached to `code`. */
extern void _PyCode_Clear_Executors(PyCodeObject *code);
0313
/* Adaptive specialization is disabled on the free-threaded build
   (Py_GIL_DISABLED): inline caches are not thread-safe to mutate. */
#ifdef Py_GIL_DISABLED

#define ENABLE_SPECIALIZATION 0
#else
#define ENABLE_SPECIALIZATION 1
#endif
0320
0321
0322
/* Specialization entry points for the adaptive interpreter. Each examines
   the operand(s) observed at runtime and rewrites the instruction at
   `instr` (and its inline cache) to a specialized variant. */
extern void _Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls,
                                         _Py_CODEUNIT *instr, int load_method);
extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr,
                                    PyObject *name);
extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr,
                                     PyObject *name);
extern void _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins,
                                      _Py_CODEUNIT *instr, PyObject *name);
extern void _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container,
                                        _Py_CODEUNIT *instr);
extern void _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub,
                                       _Py_CODEUNIT *instr);
extern void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr,
                                int nargs);
extern void _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
                                    int oparg, PyObject **locals);
extern void _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs,
                                     _Py_CODEUNIT *instr, int oparg);
extern void _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr,
                                          int oparg);
extern void _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg);
extern void _Py_Specialize_Send(PyObject *receiver, _Py_CODEUNIT *instr);
extern void _Py_Specialize_ToBool(PyObject *value, _Py_CODEUNIT *instr);
extern void _Py_Specialize_ContainsOp(PyObject *value, _Py_CODEUNIT *instr);
0347
/* Statistics-gathering macros. When Py_STATS is not defined they all expand
   to no-ops, so call sites cost nothing in regular builds. All counters are
   guarded by the `_Py_stats` pointer so stats can be toggled at runtime. */
#ifdef Py_STATS

#include "pycore_bitutils.h"        // _Py_bit_length

// Per-opcode specialization counters.
#define STAT_INC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name++; } while (0)
#define STAT_DEC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name--; } while (0)
#define OPCODE_EXE_INC(opname) do { if (_Py_stats) _Py_stats->opcode_stats[opname].execution_count++; } while (0)
// Call and object allocation counters.
#define CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.name++; } while (0)
#define OBJECT_STAT_INC(name) do { if (_Py_stats) _Py_stats->object_stats.name++; } while (0)
#define OBJECT_STAT_INC_COND(name, cond) \
    do { if (_Py_stats && cond) _Py_stats->object_stats.name++; } while (0)
#define EVAL_CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.eval_calls[name]++; } while (0)
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) \
    do { if (_Py_stats && PyFunction_Check(callable)) _Py_stats->call_stats.eval_calls[name]++; } while (0)
// GC and optimizer (Tier-2) counters.
#define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0)
#define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0)
#define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0)
// Record (previous uop, current uop) pairs; updates `lastuop` in place.
#define UOP_PAIR_INC(uopcode, lastuop) \
    do { \
        if (lastuop && _Py_stats) { \
            _Py_stats->optimization_stats.opcode[lastuop].pair_count[uopcode]++; \
        } \
        lastuop = uopcode; \
    } while (0)
#define OPT_UNSUPPORTED_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.unsupported_opcode[opname]++; } while (0)
#define OPT_ERROR_IN_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.error_in_opcode[opname]++; } while (0)
// Log-scale histogram: bucket by bit length of (length - 1), clamped.
#define OPT_HIST(length, name) \
    do { \
        if (_Py_stats) { \
            int bucket = _Py_bit_length(length >= 1 ? length - 1 : 0); \
            bucket = (bucket >= _Py_UOP_HIST_SIZE) ? _Py_UOP_HIST_SIZE - 1 : bucket; \
            _Py_stats->optimization_stats.name[bucket]++; \
        } \
    } while (0)
#define RARE_EVENT_STAT_INC(name) do { if (_Py_stats) _Py_stats->rare_event_stats.name++; } while (0)

/* Export the collected stats as a Python object. */
PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void);

#else
#define STAT_INC(opname, name) ((void)0)
#define STAT_DEC(opname, name) ((void)0)
#define OPCODE_EXE_INC(opname) ((void)0)
#define CALL_STAT_INC(name) ((void)0)
#define OBJECT_STAT_INC(name) ((void)0)
#define OBJECT_STAT_INC_COND(name, cond) ((void)0)
#define EVAL_CALL_STAT_INC(name) ((void)0)
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0)
#define GC_STAT_ADD(gen, name, n) ((void)0)
#define OPT_STAT_INC(name) ((void)0)
#define UOP_STAT_INC(opname, name) ((void)0)
#define UOP_PAIR_INC(uopcode, lastuop) ((void)0)
#define OPT_UNSUPPORTED_OPCODE(opname) ((void)0)
#define OPT_ERROR_IN_OPCODE(opname) ((void)0)
#define OPT_HIST(length, name) ((void)0)
#define RARE_EVENT_STAT_INC(name) ((void)0)
#endif  // Py_STATS
0405
0406
0407
0408
0409
0410
0411
0412
0413
0414
/* Store a 32-bit value into the inline cache at `p` (two code units).
   memcpy sidesteps alignment and strict-aliasing concerns. */
static inline void
write_u32(uint16_t *p, uint32_t val)
{
    memcpy(p, &val, sizeof(uint32_t));
}
0420
/* Store a 64-bit value into the inline cache at `p` (four code units). */
static inline void
write_u64(uint16_t *p, uint64_t val)
{
    memcpy(p, &val, sizeof(uint64_t));
}
0426
0427 static inline void
0428 write_obj(uint16_t *p, PyObject *val)
0429 {
0430 memcpy(p, &val, sizeof(val));
0431 }
0432
/* Read one 16-bit cache entry. */
static inline uint16_t
read_u16(uint16_t *p)
{
    return p[0];
}
0438
/* Read a 32-bit value spanning two cache entries at `p`. */
static inline uint32_t
read_u32(uint16_t *p)
{
    uint32_t result;
    memcpy(&result, p, sizeof(uint32_t));
    return result;
}
0446
/* Read a 64-bit value spanning four cache entries at `p`. */
static inline uint64_t
read_u64(uint16_t *p)
{
    uint64_t result;
    memcpy(&result, p, sizeof(uint64_t));
    return result;
}
0454
0455 static inline PyObject *
0456 read_obj(uint16_t *p)
0457 {
0458 PyObject *val;
0459 memcpy(&val, p, sizeof(val));
0460 return val;
0461 }
0462
0463
0464
/* Decode an unsigned varint: 6 value bits per byte, most significant
   chunk first; bit 6 (0x40) of a byte marks a continuation.
   Stores the value in *result and returns the position after the varint. */
static inline unsigned char *
parse_varint(unsigned char *p, int *result) {
    int value = *p & 63;
    while (*p & 64) {
        ++p;
        value = (value << 6) | (*p & 63);
    }
    *result = value;
    return p + 1;
}
0475
/* Encode an unsigned varint at `ptr`: 6 value bits per byte, least
   significant chunk first, bit 6 (0x40) set on all but the final byte.
   Returns the number of bytes written. */
static inline int
write_varint(uint8_t *ptr, unsigned int val)
{
    int count = 1;
    for (; val >= 64; val >>= 6) {
        *ptr++ = (uint8_t)(64 | (val & 63));
        count++;
    }
    *ptr = (uint8_t)val;
    return count;
}

/* Encode a signed value as a varint using zig-zag encoding:
   the sign goes in bit 0, the magnitude is shifted up one bit. */
static inline int
write_signed_varint(uint8_t *ptr, int val)
{
    unsigned int zigzag;
    if (val >= 0) {
        zigzag = (unsigned int)val << 1;
    }
    else {
        /* Negate via unsigned arithmetic so INT_MIN is well-defined. */
        zigzag = ((0 - (unsigned int)val) << 1) | 1;
    }
    return write_varint(ptr, zigzag);
}
0502
/* Write the first byte of a location-table entry: bit 7 set, the 4-bit
   entry code in bits 3-6, and (length - 1) in the low 3 bits.
   Always writes exactly one byte and returns 1. */
static inline int
write_location_entry_start(uint8_t *ptr, int code, int length)
{
    assert((code & 15) == code);
    *ptr = (uint8_t)(128 | (code << 3) | (length - 1));
    return 1;
}
0510
0511
0512
0513
0514
0515
0516
0517
0518
0519
0520
#include "pycore_backoff.h"

/* Initial counter for a not-yet-specialized instruction.
   NOTE(review): exact trigger semantics are defined by the counter
   machinery in pycore_backoff.h — confirm there. */
#define ADAPTIVE_WARMUP_VALUE 1
#define ADAPTIVE_WARMUP_BACKOFF 1

/* Counter installed after a specialization attempt fails; delays the
   next attempt by this many executions. */
#define ADAPTIVE_COOLDOWN_VALUE 52
#define ADAPTIVE_COOLDOWN_BACKOFF 0

/* Sanity check: the cold-exit threshold must exceed the cooldown value. */
#if COLD_EXIT_INITIAL_VALUE <= ADAPTIVE_COOLDOWN_VALUE
#  error "Cold exit value should be larger than adaptive cooldown value"
#endif
0543
0544 static inline _Py_BackoffCounter
0545 adaptive_counter_bits(uint16_t value, uint16_t backoff) {
0546 return make_backoff_counter(value, backoff);
0547 }
0548
0549 static inline _Py_BackoffCounter
0550 adaptive_counter_warmup(void) {
0551 return adaptive_counter_bits(ADAPTIVE_WARMUP_VALUE,
0552 ADAPTIVE_WARMUP_BACKOFF);
0553 }
0554
0555 static inline _Py_BackoffCounter
0556 adaptive_counter_cooldown(void) {
0557 return adaptive_counter_bits(ADAPTIVE_COOLDOWN_VALUE,
0558 ADAPTIVE_COOLDOWN_BACKOFF);
0559 }
0560
0561 static inline _Py_BackoffCounter
0562 adaptive_counter_backoff(_Py_BackoffCounter counter) {
0563 return restart_backoff_counter(counter);
0564 }
0565
0566
0567
0568
0569
/* Encode the result of comparing x and y as a single bit. The index
   2*(x >= y) + (x <= y) maps to: 0 = unordered (e.g. NaN operands),
   1 = less than, 2 = greater than, 3 = equal — matching the masks below. */
#define COMPARISON_BIT(x, y) (1 << (2 * ((x) >= (y)) + ((x) <= (y))))

#define COMPARISON_UNORDERED 1

#define COMPARISON_LESS_THAN 2
#define COMPARISON_GREATER_THAN 4
#define COMPARISON_EQUALS 8

#define COMPARISON_NOT_EQUALS (COMPARISON_UNORDERED | COMPARISON_LESS_THAN | COMPARISON_GREATER_THAN)

/* Apply instrumentation to `co` for `interp`. */
extern int _Py_Instrument(PyCodeObject *co, PyInterpreterState *interp);

/* Base (uninstrumented, unspecialized) opcode at `offset`. */
extern int _Py_GetBaseOpcode(PyCodeObject *code, int offset);

/* Length of the instruction at `offset`, in code units.
   NOTE(review): presumably includes the inline cache entries — confirm. */
extern int _PyInstruction_GetLength(PyCodeObject *code, int offset);
0592
0593 #ifdef __cplusplus
0594 }
0595 #endif
0596 #endif