Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 11038c5

Browse files
authored
gh-104584: Move super-instruction special-casing to generator (#106500)
Instead of special-casing specific instructions, we add a few more special values to the 'size' field of expansions, so in the future we can automatically handle additional super-instructions in the generator.
1 parent 363f4f9 commit 11038c5

File tree

3 files changed

+84
-39
lines changed

3 files changed

+84
-39
lines changed

Python/opcode_metadata.h

Lines changed: 9 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Python/optimizer.c

Lines changed: 16 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -411,44 +411,15 @@ translate_bytecode_to_trace(
411411
for (;;) {
412412
ADD_TO_TRACE(SAVE_IP, (int)(instr - (_Py_CODEUNIT *)code->co_code_adaptive));
413413
int opcode = instr->op.code;
414-
uint64_t operand = instr->op.arg;
414+
int oparg = instr->op.arg;
415415
int extras = 0;
416416
while (opcode == EXTENDED_ARG) {
417417
instr++;
418418
extras += 1;
419419
opcode = instr->op.code;
420-
operand = (operand << 8) | instr->op.arg;
420+
oparg = (oparg << 8) | instr->op.arg;
421421
}
422422
switch (opcode) {
423-
case LOAD_FAST_LOAD_FAST:
424-
case STORE_FAST_LOAD_FAST:
425-
case STORE_FAST_STORE_FAST:
426-
{
427-
// Reserve space for two uops (+ SAVE_IP + EXIT_TRACE)
428-
if (trace_length + 4 > max_length) {
429-
DPRINTF(1, "Ran out of space for LOAD_FAST_LOAD_FAST\n");
430-
goto done;
431-
}
432-
uint64_t oparg1 = operand >> 4;
433-
uint64_t oparg2 = operand & 15;
434-
switch (opcode) {
435-
case LOAD_FAST_LOAD_FAST:
436-
ADD_TO_TRACE(LOAD_FAST, oparg1);
437-
ADD_TO_TRACE(LOAD_FAST, oparg2);
438-
break;
439-
case STORE_FAST_LOAD_FAST:
440-
ADD_TO_TRACE(STORE_FAST, oparg1);
441-
ADD_TO_TRACE(LOAD_FAST, oparg2);
442-
break;
443-
case STORE_FAST_STORE_FAST:
444-
ADD_TO_TRACE(STORE_FAST, oparg1);
445-
ADD_TO_TRACE(STORE_FAST, oparg2);
446-
break;
447-
default:
448-
Py_FatalError("Missing case");
449-
}
450-
break;
451-
}
452423
default:
453424
{
454425
const struct opcode_macro_expansion *expansion = &_PyOpcode_macro_expansion[opcode];
@@ -462,9 +433,11 @@ translate_bytecode_to_trace(
462433
goto done;
463434
}
464435
for (int i = 0; i < nuops; i++) {
436+
uint64_t operand;
465437
int offset = expansion->uops[i].offset;
466438
switch (expansion->uops[i].size) {
467-
case 0:
439+
case OPARG_FULL:
440+
operand = oparg;
468441
if (extras && OPCODE_HAS_JUMP(opcode)) {
469442
if (opcode == JUMP_BACKWARD_NO_INTERRUPT) {
470443
operand -= extras;
@@ -475,19 +448,25 @@ translate_bytecode_to_trace(
475448
}
476449
}
477450
break;
478-
case 1:
451+
case OPARG_CACHE_1:
479452
operand = read_u16(&instr[offset].cache);
480453
break;
481-
case 2:
454+
case OPARG_CACHE_2:
482455
operand = read_u32(&instr[offset].cache);
483456
break;
484-
case 4:
457+
case OPARG_CACHE_4:
485458
operand = read_u64(&instr[offset].cache);
486459
break;
460+
case OPARG_TOP: // First half of super-instr
461+
operand = oparg >> 4;
462+
break;
463+
case OPARG_BOTTOM: // Second half of super-instr
464+
operand = oparg & 0xF;
465+
break;
487466
default:
488467
fprintf(stderr,
489-
"opcode=%d, operand=%" PRIu64 "; nuops=%d, i=%d; size=%d, offset=%d\n",
490-
opcode, operand, nuops, i,
468+
"opcode=%d, oparg=%d; nuops=%d, i=%d; size=%d, offset=%d\n",
469+
opcode, oparg, nuops, i,
491470
expansion->uops[i].size,
492471
expansion->uops[i].offset);
493472
Py_FatalError("garbled expansion");

Tools/cases_generator/generate_cases.py

Lines changed: 59 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,17 @@
4040
UNUSED = "unused"
4141
BITS_PER_CODE_UNIT = 16
4242

43+
# Constants used instead of size for macro expansions.
44+
# Note: 1, 2, 4 must match actual cache entry sizes.
45+
OPARG_SIZES = {
46+
"OPARG_FULL": 0,
47+
"OPARG_CACHE_1": 1,
48+
"OPARG_CACHE_2": 2,
49+
"OPARG_CACHE_4": 4,
50+
"OPARG_TOP": 5,
51+
"OPARG_BOTTOM": 6,
52+
}
53+
4354
RESERVED_WORDS = {
4455
"co_consts" : "Use FRAME_CO_CONSTS.",
4556
"co_names": "Use FRAME_CO_NAMES.",
@@ -1213,7 +1224,10 @@ def write_metadata(self) -> None:
12131224
self.out.emit("struct { int16_t uop; int8_t size; int8_t offset; } uops[8];")
12141225
self.out.emit("")
12151226

1227+
for key, value in OPARG_SIZES.items():
1228+
self.out.emit(f"#define {key} {value}")
12161229
self.out.emit("")
1230+
12171231
self.out.emit("#define OPCODE_METADATA_FMT(OP) "
12181232
"(_PyOpcode_opcode_metadata[(OP)].instr_format)")
12191233
self.out.emit("#define SAME_OPCODE_METADATA(OP1, OP2) \\")
@@ -1263,6 +1277,9 @@ def write_metadata(self) -> None:
12631277
# Construct a dummy Component -- input/output mappings are not used
12641278
part = Component(instr, [], [], instr.active_caches)
12651279
self.write_macro_expansions(instr.name, [part])
1280+
elif instr.kind == "inst" and variable_used(instr.inst, "oparg1"):
1281+
assert variable_used(instr.inst, "oparg2"), "Half super-instr?"
1282+
self.write_super_expansions(instr.name)
12661283
case parser.Macro():
12671284
mac = self.macro_instrs[thing.name]
12681285
self.write_macro_expansions(mac.name, mac.parts)
@@ -1342,18 +1359,58 @@ def write_macro_expansions(self, name: str, parts: MacroParts) -> None:
13421359
print(f"NOTE: Part {part.instr.name} of {name} is not a viable uop")
13431360
return
13441361
if part.instr.instr_flags.HAS_ARG_FLAG or not part.active_caches:
1345-
size, offset = 0, 0
1362+
size, offset = OPARG_SIZES["OPARG_FULL"], 0
13461363
else:
13471364
# If this assert triggers, is_viable_uops() lied
13481365
assert len(part.active_caches) == 1, (name, part.instr.name)
13491366
cache = part.active_caches[0]
13501367
size, offset = cache.effect.size, cache.offset
13511368
expansions.append((part.instr.name, size, offset))
13521369
assert len(expansions) > 0, f"Macro {name} has empty expansion?!"
1370+
self.write_expansions(name, expansions)
1371+
1372+
def write_super_expansions(self, name: str) -> None:
1373+
"""Write special macro expansions for super-instructions.
1374+
1375+
If you get an assertion failure here, you probably have accidentally
1376+
violated one of the assumptions here.
1377+
1378+
- A super-instruction's name is of the form FIRST_SECOND where
1379+
FIRST and SECOND are regular instructions whose name has the
1380+
form FOO_BAR. Thus, there must be exactly 3 underscores.
1381+
Example: LOAD_CONST_STORE_FAST.
1382+
1383+
- A super-instruction's body uses `oparg1` and `oparg2`, and no
1384+
other instruction's body uses those variable names.
1385+
1386+
- A super-instruction has no active (used) cache entries.
1387+
1388+
In the expansion, the first instruction's operand is all but the
1389+
bottom 4 bits of the super-instruction's oparg, and the second
1390+
instruction's operand is the bottom 4 bits. We use the special
1391+
size codes OPARG_TOP and OPARG_BOTTOM for these.
1392+
"""
1393+
pieces = name.split("_")
1394+
assert len(pieces) == 4, f"{name} doesn't look like a super-instr"
1395+
name1 = "_".join(pieces[:2])
1396+
name2 = "_".join(pieces[2:])
1397+
assert name1 in self.instrs, f"{name1} doesn't match any instr"
1398+
assert name2 in self.instrs, f"{name2} doesn't match any instr"
1399+
instr1 = self.instrs[name1]
1400+
instr2 = self.instrs[name2]
1401+
assert not instr1.active_caches, f"{name1} has active caches"
1402+
assert not instr2.active_caches, f"{name2} has active caches"
1403+
expansions = [
1404+
(name1, OPARG_SIZES["OPARG_TOP"], 0),
1405+
(name2, OPARG_SIZES["OPARG_BOTTOM"], 0),
1406+
]
1407+
self.write_expansions(name, expansions)
1408+
1409+
def write_expansions(self, name: str, expansions: list[tuple[str, int, int]]) -> None:
13531410
pieces = [f"{{ {name}, {size}, {offset} }}" for name, size, offset in expansions]
13541411
self.out.emit(
13551412
f"[{name}] = "
1356-
f"{{ .nuops = {len(expansions)}, .uops = {{ {', '.join(pieces)} }} }},"
1413+
f"{{ .nuops = {len(pieces)}, .uops = {{ {', '.join(pieces)} }} }},"
13571414
)
13581415

13591416
def emit_metadata_entry(

0 commit comments

Comments
 (0)