summaryrefslogtreecommitdiffstats
path: root/Include
diff options
context:
space:
mode:
authorMark Shannon <mark@hotpy.org>2022-06-30 21:03:37 (GMT)
committerGitHub <noreply@github.com>2022-06-30 21:03:37 (GMT)
commit113b309f18103343a195f7c53c41763acf295167 (patch)
tree4c4792ad8d608f454e0895fda41539b546d347b7 /Include
parent6c405381918a358a62ce72798dd37c2a3d2a9be2 (diff)
downloadcpython-113b309f18103343a195f7c53c41763acf295167.zip
cpython-113b309f18103343a195f7c53c41763acf295167.tar.gz
cpython-113b309f18103343a195f7c53c41763acf295167.tar.bz2
[3.11] GH-93354: Use exponential backoff to avoid excessive specialization attempts (GH-93355) (GH-93379)
Co-authored-by: Mark Shannon <mark@hotpy.org>
Co-authored-by: Ɓukasz Langa <lukasz@langa.pl>
Diffstat (limited to 'Include')
-rw-r--r--Include/internal/pycore_code.h47
1 file changed, 44 insertions(+), 3 deletions(-)
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h
index 551b9c0..9528c89 100644
--- a/Include/internal/pycore_code.h
+++ b/Include/internal/pycore_code.h
@@ -233,9 +233,6 @@ extern void _PyLineTable_InitAddressRange(
extern int _PyLineTable_NextAddressRange(PyCodeAddressRange *range);
extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range);
-
-#define ADAPTIVE_CACHE_BACKOFF 64
-
/* Specialization functions */
extern int _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr,
@@ -475,6 +472,50 @@ write_location_entry_start(uint8_t *ptr, int code, int length)
}
/** Counters
 * The first 16-bit value in each inline cache is a counter.
 * When counting misses, the counter is treated as a simple unsigned value.
 *
 * When counting executions until the next specialization attempt,
 * exponential backoff is used to reduce the number of specialization failures.
 * The high 12 bits store the counter, the low 4 bits store the backoff exponent.
 * On a specialization failure, the backoff exponent is incremented and the
 * counter set to (2**backoff - 1).
 * Backoff == 6 -> starting counter == 63, backoff == 10 -> starting counter == 1023.
 */

/* With a 16-bit counter, we have 12 bits for the counter value, and 4 bits for the backoff */
#define ADAPTIVE_BACKOFF_BITS 4
/* The initial counter value is 31 == 2**ADAPTIVE_BACKOFF_START - 1 */
#define ADAPTIVE_BACKOFF_START 5

/* Backoff exponent saturates at 12, i.e. at most 2**12 - 1 == 4095
   executions between specialization attempts. */
#define MAX_BACKOFF_VALUE (16 - ADAPTIVE_BACKOFF_BITS)


/* Pack a counter value and a backoff exponent into one 16-bit cache entry.
 * Both fields are masked to their widths so an out-of-range `value` cannot
 * bleed into the backoff bits (and vice versa); the explicit cast makes the
 * narrowing to uint16_t intentional rather than implicit. */
static inline uint16_t
adaptive_counter_bits(int value, int backoff) {
    return (uint16_t)(((value & ((1 << (16 - ADAPTIVE_BACKOFF_BITS)) - 1))
                           << ADAPTIVE_BACKOFF_BITS) |
                      (backoff & ((1 << ADAPTIVE_BACKOFF_BITS) - 1)));
}

/* Counter for a freshly (re)armed cache entry: 31 executions before the
 * first specialization attempt, with backoff exponent ADAPTIVE_BACKOFF_START. */
static inline uint16_t
adaptive_counter_start(void) {
    unsigned int value = (1 << ADAPTIVE_BACKOFF_START) - 1;
    return adaptive_counter_bits(value, ADAPTIVE_BACKOFF_START);
}

/* On a specialization failure: increment the backoff exponent (saturating at
 * MAX_BACKOFF_VALUE) and restart the counter at 2**backoff - 1. */
static inline uint16_t
adaptive_counter_backoff(uint16_t counter) {
    unsigned int backoff = counter & ((1 << ADAPTIVE_BACKOFF_BITS) - 1);
    backoff++;
    if (backoff > MAX_BACKOFF_VALUE) {
        backoff = MAX_BACKOFF_VALUE;
    }
    unsigned int value = (1 << backoff) - 1;
    return adaptive_counter_bits(value, backoff);
}
+
+
/* Line array cache for tracing */
extern int _PyCode_CreateLineArray(PyCodeObject *co);