author    Mark Shannon <mark@hotpy.org>  2023-11-09 13:49:51 (GMT)
committer GitHub <noreply@github.com>    2023-11-09 13:49:51 (GMT)
commit    34a03e951b027902d993c7066ba8e6b7e92cb2a9 (patch)
tree      3a25738c13185ea521b0e8bad09e0ee2eda660a7 /Python
parent    25c49564880e6868e4c76602f9f1650f0bc71c75 (diff)
GH-111843: Tier 2 exponential backoff (GH-111850)
Diffstat (limited to 'Python')
-rw-r--r--  Python/bytecodes.c          23
-rw-r--r--  Python/generated_cases.c.h  23
-rw-r--r--  Python/optimizer.c           9
3 files changed, 41 insertions, 14 deletions
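
The patch replaces the single retry threshold for JUMP_BACKWARD back-edges with exponential backoff: each failed attempt to compile a Tier 2 trace roughly doubles the number of back-edges before the next attempt, so hopeless loops stop wasting optimizer time. A minimal standalone sketch of that policy, separate from the CPython internals below (MIN_BACKOFF and MAX_BACKOFF are illustrative stand-ins, not the real constants):

/* Exponential backoff in isolation: after each failed attempt,
 * the wait before the next attempt doubles, up to a cap. */
#include <stdio.h>

#define MIN_BACKOFF 4   /* first wait: 2^4 = 16 iterations (assumed value) */
#define MAX_BACKOFF 11  /* cap so the wait fits a small counter field */

int main(void)
{
    int backoff = MIN_BACKOFF;
    long iteration = 0;
    for (int attempt = 0; attempt < 5; attempt++) {
        iteration += 1L << backoff;            /* wait 2^backoff iterations */
        printf("attempt %d at iteration %ld (waited %ld)\n",
               attempt, iteration, 1L << backoff);
        if (backoff < MAX_BACKOFF) {
            backoff++;                         /* double the next wait */
        }
    }
    return 0;
}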
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 7010042..d914f27 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -2335,10 +2335,12 @@ dummy_func(
JUMPBY(-oparg);
#if ENABLE_SPECIALIZATION
this_instr[1].cache += (1 << OPTIMIZER_BITS_IN_COUNTER);
- if (this_instr[1].cache > tstate->interp->optimizer_backedge_threshold &&
- // Double-check that the opcode isn't instrumented or something:
- this_instr->op.code == JUMP_BACKWARD)
- {
+ /* We are using unsigned values, but we really want signed values, so
+ * do the 2s complement comparison manually */
+ uint16_t ucounter = this_instr[1].cache + (1 << 15);
+ uint16_t threshold = tstate->interp->optimizer_backedge_threshold + (1 << 15);
+ // Double-check that the opcode isn't instrumented or something:
+ if (ucounter > threshold && this_instr->op.code == JUMP_BACKWARD) {
OPT_STAT_INC(attempts);
int optimized = _PyOptimizer_BackEdge(frame, this_instr, next_instr, stack_pointer);
ERROR_IF(optimized < 0, error);
@@ -2346,8 +2348,19 @@ dummy_func(
// Rewind and enter the executor:
assert(this_instr->op.code == ENTER_EXECUTOR);
next_instr = this_instr;
+ this_instr[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
+ }
+ else {
+ int backoff = this_instr[1].cache & ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
+ if (backoff < MINIMUM_TIER2_BACKOFF) {
+ backoff = MINIMUM_TIER2_BACKOFF;
+ }
+ else if (backoff < 15 - OPTIMIZER_BITS_IN_COUNTER) {
+ backoff++;
+ }
+ assert(backoff <= 15 - OPTIMIZER_BITS_IN_COUNTER);
+ this_instr[1].cache = ((1 << 16) - ((1 << OPTIMIZER_BITS_IN_COUNTER) << backoff)) | backoff;
}
- this_instr[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
}
#endif /* ENABLE_SPECIALIZATION */
}
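
The hunk above relies on the biased-comparison trick its new comment describes: the counter can now hold negative two's complement values, and adding 1 << 15 to both uint16_t operands lets a plain unsigned compare reproduce signed ordering. A hedged sketch of just that trick (signed_gt_via_bias is a hypothetical helper for illustration, not a CPython function):

/* Adding 1 << 15 maps INT16_MIN..INT16_MAX onto 0..UINT16_MAX while
 * preserving order, so unsigned '>' gives the signed answer. */
#include <assert.h>
#include <stdint.h>

static int signed_gt_via_bias(uint16_t a, uint16_t b)
{
    return (uint16_t)(a + (1 << 15)) > (uint16_t)(b + (1 << 15));
}

int main(void)
{
    uint16_t minus_one = 0xFFFF;  /* -1 as a 16-bit two's complement pattern */
    assert(minus_one > (uint16_t)1);            /* plain unsigned ordering */
    assert(!signed_gt_via_bias(minus_one, 1));  /* signed view: -1 > 1 is false */
    assert(signed_gt_via_bias(1, minus_one));   /* signed view: 1 > -1 is true */
    return 0;
}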
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 1c85304..b9a2b22 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -3409,10 +3409,12 @@
JUMPBY(-oparg);
#if ENABLE_SPECIALIZATION
this_instr[1].cache += (1 << OPTIMIZER_BITS_IN_COUNTER);
- if (this_instr[1].cache > tstate->interp->optimizer_backedge_threshold &&
- // Double-check that the opcode isn't instrumented or something:
- this_instr->op.code == JUMP_BACKWARD)
- {
+ /* We are using unsigned values, but we really want signed values, so
+ * do the 2s complement comparison manually */
+ uint16_t ucounter = this_instr[1].cache + (1 << 15);
+ uint16_t threshold = tstate->interp->optimizer_backedge_threshold + (1 << 15);
+ // Double-check that the opcode isn't instrumented or something:
+ if (ucounter > threshold && this_instr->op.code == JUMP_BACKWARD) {
OPT_STAT_INC(attempts);
int optimized = _PyOptimizer_BackEdge(frame, this_instr, next_instr, stack_pointer);
if (optimized < 0) goto error;
@@ -3420,8 +3422,19 @@
// Rewind and enter the executor:
assert(this_instr->op.code == ENTER_EXECUTOR);
next_instr = this_instr;
+ this_instr[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
+ }
+ else {
+ int backoff = this_instr[1].cache & ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
+ if (backoff < MINIMUM_TIER2_BACKOFF) {
+ backoff = MINIMUM_TIER2_BACKOFF;
+ }
+ else if (backoff < 15 - OPTIMIZER_BITS_IN_COUNTER) {
+ backoff++;
+ }
+ assert(backoff <= 15 - OPTIMIZER_BITS_IN_COUNTER);
+ this_instr[1].cache = ((1 << 16) - ((1 << OPTIMIZER_BITS_IN_COUNTER) << backoff)) | backoff;
}
- this_instr[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
}
#endif /* ENABLE_SPECIALIZATION */
DISPATCH();
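
The else-branch in both hunks packs two fields into one 16-bit cache entry: the low OPTIMIZER_BITS_IN_COUNTER bits keep the backoff exponent, and the remaining high bits hold a counter primed to -(1 << backoff) so it reaches the trigger point after 2^backoff more back-edges. A standalone simulation of that encoding, with an assumed 4-bit exponent field (COUNTER_BITS and MIN_BACKOFF are stand-ins for the real constants):

/* Simulate the cache word: low COUNTER_BITS bits = backoff exponent,
 * high bits = counter starting at -(1 << backoff). */
#include <stdint.h>
#include <stdio.h>

#define COUNTER_BITS 4
#define MIN_BACKOFF  4

static uint16_t fail_and_back_off(uint16_t cache)
{
    int backoff = cache & ((1 << COUNTER_BITS) - 1);
    if (backoff < MIN_BACKOFF) {
        backoff = MIN_BACKOFF;
    }
    else if (backoff < 15 - COUNTER_BITS) {
        backoff++;
    }
    /* Counter field := -(1 << backoff) in two's complement, exponent
     * kept in the low bits. */
    return (uint16_t)(((1 << 16) - ((1 << COUNTER_BITS) << backoff)) | backoff);
}

int main(void)
{
    uint16_t cache = 0;
    for (int fail = 0; fail < 4; fail++) {
        cache = fail_and_back_off(cache);
        int backoff = cache & ((1 << COUNTER_BITS) - 1);
        /* Assumes arithmetic right shift of negative values, as on all
         * mainstream compilers. */
        int counter = (int16_t)cache >> COUNTER_BITS;
        printf("backoff=%d counter=%d (next attempt in %d back-edges)\n",
               backoff, counter, -counter);
    }
    return 0;
}

Each failure prints a counter of -16, -32, -64, -128: the doubling wait, with the exponent preserved across resets so progress toward the cap survives each round trip through the interpreter.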
diff --git a/Python/optimizer.c b/Python/optimizer.c
index 42279be..e142bd0 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -107,6 +107,7 @@ error_optimize(
_PyExecutorObject **exec,
int Py_UNUSED(stack_entries))
{
+ assert(0);
PyErr_Format(PyExc_SystemError, "Should never call error_optimize");
return -1;
}
@@ -122,8 +123,8 @@ PyTypeObject _PyDefaultOptimizer_Type = {
_PyOptimizerObject _PyOptimizer_Default = {
PyObject_HEAD_INIT(&_PyDefaultOptimizer_Type)
.optimize = error_optimize,
- .resume_threshold = UINT16_MAX,
- .backedge_threshold = UINT16_MAX,
+ .resume_threshold = INT16_MAX,
+ .backedge_threshold = INT16_MAX,
};
_PyOptimizerObject *
@@ -309,7 +310,7 @@ PyUnstable_Optimizer_NewCounter(void)
return NULL;
}
opt->base.optimize = counter_optimize;
- opt->base.resume_threshold = UINT16_MAX;
+ opt->base.resume_threshold = INT16_MAX;
opt->base.backedge_threshold = 0;
opt->count = 0;
return (PyObject *)opt;
@@ -915,7 +916,7 @@ PyUnstable_Optimizer_NewUOpOptimizer(void)
return NULL;
}
opt->optimize = uop_optimize;
- opt->resume_threshold = UINT16_MAX;
+ opt->resume_threshold = INT16_MAX;
// Need at least 3 iterations to settle specializations.
// A few lower bits of the counter are reserved for other flags.
opt->backedge_threshold = 16 << OPTIMIZER_BITS_IN_COUNTER;
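
The optimizer.c changes follow from the same trick: a threshold meant to mean "never trigger" must now be INT16_MAX, the largest value under signed ordering, because the bit pattern UINT16_MAX reads as -1, which even a freshly reset counter exceeds. A quick standalone check of that reasoning (not CPython code):

#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint16_t counter = 0;        /* a freshly reset counter */
    uint16_t bias = 1 << 15;
    /* Same comparison shape as the JUMP_BACKWARD hunk: a UINT16_MAX
     * threshold fires immediately... */
    assert((uint16_t)(counter + bias) > (uint16_t)(UINT16_MAX + bias));
    /* ...while INT16_MAX can never be exceeded. */
    assert(!((uint16_t)(counter + bias) > (uint16_t)(INT16_MAX + bias)));
    return 0;
}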