
Commit 5099356
Baby steps: reimplement thresholds using adaptive counter abstractions
Parent: 50f9b0b

File tree: 4 files changed (+66, -68 lines)

Include/internal/pycore_code.h

Lines changed: 5 additions & 0 deletions
```diff
@@ -498,6 +498,11 @@ adaptive_counter_cooldown(void) {
                                  ADAPTIVE_COOLDOWN_BACKOFF);
 }
 
+static inline uint16_t
+adaptive_counter_jump_init(void) {
+    return adaptive_counter_bits(16, 4);
+}
+
 static inline uint16_t
 adaptive_counter_backoff(uint16_t counter) {
     uint16_t backoff = counter & ((1 << ADAPTIVE_BACKOFF_BITS) - 1);
```
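For orientation, here is a minimal standalone sketch of the packing scheme the new `adaptive_counter_jump_init()` relies on, assuming the layout implied by the masking in `adaptive_counter_backoff()`: the countdown value lives in the upper bits of the 16-bit cache entry and the backoff exponent in the low `ADAPTIVE_BACKOFF_BITS` bits. The `adaptive_counter_value()` helper and the `main()` driver are illustrative additions, not part of the commit.

```c
#include <stdint.h>
#include <stdio.h>

#define ADAPTIVE_BACKOFF_BITS 4

/* Pack a countdown value and a backoff exponent into one 16-bit cache slot.
 * Mirrors adaptive_counter_bits() from the diff; the high/low split is an
 * assumption inferred from the masking in adaptive_counter_backoff(). */
static inline uint16_t
adaptive_counter_bits(uint16_t value, uint16_t backoff) {
    return (uint16_t)((value << ADAPTIVE_BACKOFF_BITS)
                      | (backoff & ((1 << ADAPTIVE_BACKOFF_BITS) - 1)));
}

/* Hypothetical helper, not in the commit: extract the countdown value bits. */
static inline uint16_t
adaptive_counter_value(uint16_t counter) {
    return (uint16_t)(counter >> ADAPTIVE_BACKOFF_BITS);
}

int main(void) {
    /* What adaptive_counter_jump_init() returns: count down from 16,
     * starting with a backoff exponent of 4. */
    uint16_t c = adaptive_counter_bits(16, 4);
    printf("packed=0x%04x value=%u backoff=%u\n",
           (unsigned)c, (unsigned)adaptive_counter_value(c),
           (unsigned)(c & ((1 << ADAPTIVE_BACKOFF_BITS) - 1)));
    return 0;
}
```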

Python/bytecodes.c

Lines changed: 27 additions & 29 deletions
```diff
@@ -2340,41 +2340,39 @@ dummy_func(
             JUMPBY(-oparg);
             #if ENABLE_SPECIALIZATION
             uint16_t counter = this_instr[1].cache;
-            this_instr[1].cache = counter + (1 << OPTIMIZER_BITS_IN_COUNTER);
-            /* We are using unsigned values, but we really want signed values, so
-             * do the 2s complement adjustment manually */
-            uint32_t offset_counter = counter ^ (1 << 15);
-            uint32_t threshold = tstate->interp->optimizer_backedge_threshold;
-            assert((threshold & OPTIMIZER_BITS_MASK) == 0);
-            // Use '>=' not '>' so that the optimizer/backoff bits do not effect the result.
-            // Double-check that the opcode isn't instrumented or something:
-            if (offset_counter >= threshold && this_instr->op.code == JUMP_BACKWARD) {
-                _Py_CODEUNIT *start = this_instr;
-                /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */
-                while (oparg > 255) {
-                    oparg >>= 8;
-                    start--;
-                }
-                _PyExecutorObject *executor;
-                int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor);
-                ERROR_IF(optimized < 0, error);
-                if (optimized) {
-                    assert(tstate->previous_executor == NULL);
-                    tstate->previous_executor = Py_None;
-                    GOTO_TIER_TWO(executor);
+            if (ADAPTIVE_COUNTER_IS_ZERO(counter) && this_instr->op.code == JUMP_BACKWARD) {
+                if (counter == 0) {
+                    // Dynamically initialize the counter
+                    PyInterpreterState *interp = tstate->interp;
+                    if (interp->optimizer_backedge_threshold != OPTIMIZER_UNREACHABLE_THRESHOLD) {
+                        counter = interp->optimizer_backedge_threshold;
+                        assert(counter != 0);
+                        this_instr[1].cache = counter;
+                    }
                 }
                 else {
-                    int backoff = this_instr[1].cache & OPTIMIZER_BITS_MASK;
-                    backoff++;
-                    if (backoff < MIN_TIER2_BACKOFF) {
-                        backoff = MIN_TIER2_BACKOFF;
+                    _Py_CODEUNIT *start = this_instr;
+                    /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */
+                    while (oparg > 255) {
+                        oparg >>= 8;
+                        start--;
                     }
-                    else if (backoff > MAX_TIER2_BACKOFF) {
-                        backoff = MAX_TIER2_BACKOFF;
+                    _PyExecutorObject *executor;
+                    int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor);
+                    ERROR_IF(optimized < 0, error);
+                    if (optimized) {
+                        assert(tstate->previous_executor == NULL);
+                        tstate->previous_executor = Py_None;
+                        GOTO_TIER_TWO(executor);
+                    }
+                    else {
+                        this_instr[1].cache = adaptive_counter_backoff(counter);
                     }
-                    this_instr[1].cache = ((UINT16_MAX << OPTIMIZER_BITS_IN_COUNTER) << backoff) | backoff;
                 }
             }
+            else {
+                DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
+            }
            #endif /* ENABLE_SPECIALIZATION */
         }
 
```
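Taken together, the new control flow is: on every backedge, decrement the counter's value bits; once they reach zero, either install the interpreter's backedge threshold (a fully zero cache means the instruction has never been initialized), or attempt tier-two optimization, re-arming with a larger backoff exponent on failure so retries become exponentially rarer. Below is a rough, self-contained simulation of that lifecycle under the same bit-layout assumption as above; `pack`, `value_is_zero`, `decrement`, `do_backoff`, and `try_optimize` are illustrative stand-ins for `adaptive_counter_bits`, `ADAPTIVE_COUNTER_IS_ZERO`, `DECREMENT_ADAPTIVE_COUNTER`, `adaptive_counter_backoff`, and `_PyOptimizer_Optimize`, and the exact decrement step and backoff cap are assumptions.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ADAPTIVE_BACKOFF_BITS 4
#define MAX_BACKOFF 12  /* hypothetical cap on the exponent */

static uint16_t pack(uint16_t value, uint16_t backoff) {
    return (uint16_t)((value << ADAPTIVE_BACKOFF_BITS) | backoff);
}

/* Stand-in for ADAPTIVE_COUNTER_IS_ZERO: only the value bits matter. */
static bool value_is_zero(uint16_t c) {
    return (c >> ADAPTIVE_BACKOFF_BITS) == 0;
}

/* Stand-in for DECREMENT_ADAPTIVE_COUNTER: assumed to subtract one from
 * the value bits while leaving the backoff bits untouched. */
static uint16_t decrement(uint16_t c) {
    return (uint16_t)(c - (1 << ADAPTIVE_BACKOFF_BITS));
}

/* Stand-in for adaptive_counter_backoff(): bump the exponent and re-arm
 * the countdown so the next attempt happens after ~2^backoff backedges. */
static uint16_t do_backoff(uint16_t c) {
    uint16_t b = c & ((1 << ADAPTIVE_BACKOFF_BITS) - 1);
    if (b < MAX_BACKOFF) {
        b++;
    }
    return pack((uint16_t)((1 << b) - 1), b);
}

/* Stand-in for _PyOptimizer_Optimize(); always fails for the demo. */
static bool try_optimize(void) { return false; }

int main(void) {
    uint16_t threshold = pack(16, 4);  /* interp->optimizer_backedge_threshold */
    uint16_t cache = 0;                /* instruction caches start zeroed */
    for (int backedge = 1; backedge <= 300; backedge++) {
        if (value_is_zero(cache)) {
            if (cache == 0) {
                cache = threshold;     /* dynamic initialization on first visit */
            }
            else if (try_optimize()) {
                break;                 /* the real code does GOTO_TIER_TWO here */
            }
            else {
                cache = do_backoff(cache);
                printf("backedge %3d: optimize failed, next try in %u\n",
                       backedge, (unsigned)(cache >> ADAPTIVE_BACKOFF_BITS));
            }
        }
        else {
            cache = decrement(cache);
        }
    }
    return 0;
}
```

With the always-failing `try_optimize()`, the printed wait times come out as 31, 63, 127, …, which is the exponential backoff the commit is after.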

Python/generated_cases.c.h

Lines changed: 27 additions & 29 deletions
Some generated files are not rendered by default.

Python/optimizer.c

Lines changed: 7 additions & 10 deletions
```diff
@@ -110,9 +110,7 @@ never_optimize(
     _PyExecutorObject **exec,
     int Py_UNUSED(stack_entries))
 {
-    /* Although it should be benign for this to be called,
-     * it shouldn't happen, so fail in debug builds. */
-    assert(0 && "never optimize should never be called");
+    // This may be called if the optimizer is reset
     return 0;
 }
 
@@ -135,17 +133,16 @@ static _PyOptimizerObject _PyOptimizer_Default = {
 static uint32_t
 shift_and_offset_threshold(uint32_t threshold)
 {
-    return (threshold << OPTIMIZER_BITS_IN_COUNTER) + (1 << 15);
+    if (threshold == OPTIMIZER_UNREACHABLE_THRESHOLD) {
+        return threshold;
+    }
+    return adaptive_counter_bits(threshold - 1, MIN_TIER2_BACKOFF);
 }
 
 _PyOptimizerObject *
 PyUnstable_GetOptimizer(void)
 {
     PyInterpreterState *interp = _PyInterpreterState_GET();
-    assert(interp->optimizer_backedge_threshold ==
-           shift_and_offset_threshold(interp->optimizer->backedge_threshold));
-    assert(interp->optimizer_resume_threshold ==
-           shift_and_offset_threshold(interp->optimizer->resume_threshold));
     if (interp->optimizer == &_PyOptimizer_Default) {
         return NULL;
     }
@@ -188,8 +185,8 @@ _Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject *optimizer)
     interp->optimizer_resume_threshold = shift_and_offset_threshold(optimizer->resume_threshold);
     interp->optimizer_side_threshold = optimizer->side_threshold;
     if (optimizer == &_PyOptimizer_Default) {
-        assert(interp->optimizer_backedge_threshold > (1 << 16));
-        assert(interp->optimizer_resume_threshold > (1 << 16));
+        assert(interp->optimizer_backedge_threshold == OPTIMIZER_UNREACHABLE_THRESHOLD);
+        assert(interp->optimizer_resume_threshold == OPTIMIZER_UNREACHABLE_THRESHOLD);
     }
     return old;
 }
```
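The reworked `shift_and_offset_threshold()` now speaks the adaptive-counter format directly: a real threshold becomes a packed countdown, while the unreachable sentinel passes through untouched so the assertions for the default (never-optimizing) optimizer hold and `JUMP_BACKWARD` stays cold. A small sketch of that mapping follows, reusing the packing helper from above; the concrete values of `OPTIMIZER_UNREACHABLE_THRESHOLD` and `MIN_TIER2_BACKOFF` are not shown in this diff and are assumed here.

```c
#include <stdint.h>
#include <stdio.h>

#define ADAPTIVE_BACKOFF_BITS 4
#define MIN_TIER2_BACKOFF 4                      /* assumed value */
#define OPTIMIZER_UNREACHABLE_THRESHOLD UINT16_MAX  /* assumed sentinel */

static uint16_t adaptive_counter_bits(uint16_t value, uint16_t backoff) {
    return (uint16_t)((value << ADAPTIVE_BACKOFF_BITS) | backoff);
}

static uint32_t shift_and_offset_threshold(uint32_t threshold) {
    if (threshold == OPTIMIZER_UNREACHABLE_THRESHOLD) {
        return threshold;  /* sentinel passes through; JUMP_BACKWARD never fires */
    }
    /* threshold - 1: the first backedge is spent installing the counter,
     * so the optimizer triggers after roughly `threshold` visits. */
    return adaptive_counter_bits((uint16_t)(threshold - 1), MIN_TIER2_BACKOFF);
}

int main(void) {
    printf("threshold 16 -> 0x%04x\n", (unsigned)shift_and_offset_threshold(16));
    printf("unreachable  -> 0x%04x\n",
           (unsigned)shift_and_offset_threshold(OPTIMIZER_UNREACHABLE_THRESHOLD));
    return 0;
}
```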
