summaryrefslogtreecommitdiffstats
path: root/Include
diff options
context:
space:
mode:
Diffstat (limited to 'Include')
-rw-r--r--  Include/cpython/pystats.h       | 28
-rw-r--r--  Include/internal/pycore_code.h  | 15
2 files changed, 39 insertions(+), 4 deletions(-)
diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h
index 150e16f..056406e 100644
--- a/Include/cpython/pystats.h
+++ b/Include/cpython/pystats.h
@@ -86,10 +86,6 @@ typedef struct _object_stats {
uint64_t type_cache_dunder_hits;
uint64_t type_cache_dunder_misses;
uint64_t type_cache_collisions;
- uint64_t optimization_attempts;
- uint64_t optimization_traces_created;
- uint64_t optimization_traces_executed;
- uint64_t optimization_uops_executed;
/* Temporary value used during GC */
uint64_t object_visits;
} ObjectStats;
@@ -100,10 +96,34 @@ typedef struct _gc_stats {
uint64_t objects_collected;
} GCStats;
+/* Per-uop counters; one slot per uop opcode in
+   OptimizationStats.opcode[].  execution_count is bumped by
+   UOP_EXE_INC() each time the uop is executed. */
+typedef struct _uop_stats {
+ uint64_t execution_count;
+} UOpStats;
+
+/* Number of buckets in the trace-length histograms below; bucketing is
+   by bit length (power of two) — see OPT_HIST() in pycore_code.h. */
+#define _Py_UOP_HIST_SIZE 32
+
+/* Counters for the trace optimizer, collected in Py_STATS builds.
+   Replaces the four optimization_* fields formerly kept in ObjectStats
+   (removed in this change). */
+typedef struct _optimization_stats {
+ uint64_t attempts;
+ uint64_t traces_created;
+ uint64_t traces_executed;
+ uint64_t uops_executed;
+ uint64_t trace_stack_overflow;
+ uint64_t trace_stack_underflow;
+ uint64_t trace_too_long;
+ uint64_t inner_loop;
+ uint64_t recursive_call;
+ UOpStats opcode[512];
+ uint64_t unsupported_opcode[256];
+ uint64_t trace_length_hist[_Py_UOP_HIST_SIZE];
+ uint64_t trace_run_length_hist[_Py_UOP_HIST_SIZE];
+ uint64_t optimized_trace_length_hist[_Py_UOP_HIST_SIZE];
+} OptimizationStats;
+
/* Top-level aggregate of all interpreter statistics (Py_STATS builds);
   gains the new OptimizationStats member in this change. */
typedef struct _stats {
 OpcodeStats opcode_stats[256];
 CallStats call_stats;
 ObjectStats object_stats;
+ OptimizationStats optimization_stats;
 GCStats *gc_stats;
} PyStats;
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h
index a77fa11..d31d836 100644
--- a/Include/internal/pycore_code.h
+++ b/Include/internal/pycore_code.h
@@ -282,6 +282,17 @@ extern int _PyStaticCode_Init(PyCodeObject *co);
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) \
do { if (_Py_stats && PyFunction_Check(callable)) _Py_stats->call_stats.eval_calls[name]++; } while (0)
#define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0)
+/* Increment counter `name` in optimization_stats (no-op when stats are off). */
+#define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0)
+/* Count one execution of uop `opname`. */
+#define UOP_EXE_INC(opname) do { if (_Py_stats) _Py_stats->optimization_stats.opcode[(opname)].execution_count++; } while (0)
+/* Count one trace-creation failure caused by unsupported opcode `opname`. */
+#define OPT_UNSUPPORTED_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.unsupported_opcode[(opname)]++; } while (0)
+/* Record `length` in power-of-two histogram `name`; the last bucket
+   absorbs all lengths beyond the histogram range.  NOTE: `length` is
+   evaluated twice — callers must not pass expressions with side effects. */
+#define OPT_HIST(length, name) \
+ do { \
+ if (_Py_stats) { \
+ int bucket = _Py_bit_length((length) >= 1 ? (length) - 1 : 0); \
+ bucket = (bucket >= _Py_UOP_HIST_SIZE) ? _Py_UOP_HIST_SIZE - 1 : bucket; \
+ _Py_stats->optimization_stats.name[bucket]++; \
+ } \
+ } while (0)
// Export for '_opcode' shared extension
PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void);
@@ -296,6 +307,10 @@ PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void);
#define EVAL_CALL_STAT_INC(name) ((void)0)
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0)
#define GC_STAT_ADD(gen, name, n) ((void)0)
+/* Py_STATS disabled: the optimizer stat macros compile to nothing. */
+#define OPT_STAT_INC(name) ((void)0)
+#define UOP_EXE_INC(opname) ((void)0)
+#define OPT_UNSUPPORTED_OPCODE(opname) ((void)0)
+#define OPT_HIST(length, name) ((void)0)
#endif // !Py_STATS
// Utility functions for reading/writing 32/64-bit values in the inline caches.