@@ -132,6 +132,9 @@ pub struct Block {
132132 pub preserve_lasti : bool ,
133133 /// Stack depth at block entry, set by stack depth analysis
134134 pub start_depth : Option < u32 > ,
135+ /// Whether this block is "cold" (only reachable via exception table).
136+ /// Cold blocks are pushed to the end during optimization.
137+ pub cold : bool ,
135138}
136139
137140impl Default for Block {
@@ -142,6 +145,7 @@ impl Default for Block {
142145 except_handler : false ,
143146 preserve_lasti : false ,
144147 start_depth : None ,
148+ cold : false ,
145149 }
146150 }
147151}
@@ -205,6 +209,8 @@ impl CodeInfo {
205209 // Post-codegen CFG analysis passes (flowgraph.c pipeline)
206210 mark_except_handlers ( & mut self . blocks ) ;
207211 label_exception_targets ( & mut self . blocks ) ;
212+ push_cold_blocks_to_end ( & mut self . blocks ) ;
213+ normalize_jumps ( & mut self . blocks ) ;
208214
209215 let max_stackdepth = self . max_stackdepth ( ) ?;
210216 let cell2arg = self . cell2arg ( ) ;
@@ -1154,6 +1160,248 @@ pub(crate) fn mark_except_handlers(blocks: &mut [Block]) {
11541160 }
11551161}
11561162
1163+ /// Mark cold blocks: blocks only reachable via exception table.
1164+ /// BFS from entry following normal control flow (next + jump targets),
1165+ /// skipping edges into except_handler blocks. Unvisited blocks are cold.
1166+ /// flowgraph.c mark_cold
1167+ fn mark_cold ( blocks : & mut [ Block ] ) {
1168+ let n = blocks. len ( ) ;
1169+ let mut warm = vec ! [ false ; n] ;
1170+ let mut queue = std:: collections:: VecDeque :: new ( ) ;
1171+
1172+ // Entry block is always warm
1173+ warm[ 0 ] = true ;
1174+ queue. push_back ( BlockIdx ( 0 ) ) ;
1175+
1176+ while let Some ( block_idx) = queue. pop_front ( ) {
1177+ let block = & blocks[ block_idx. idx ( ) ] ;
1178+
1179+ // Follow fall-through (block.next)
1180+ let has_fallthrough = block
1181+ . instructions
1182+ . last ( )
1183+ . map ( |ins| !ins. instr . is_scope_exit ( ) && !ins. instr . is_unconditional_jump ( ) )
1184+ . unwrap_or ( true ) ;
1185+ if has_fallthrough && block. next != BlockIdx :: NULL {
1186+ let next_idx = block. next . idx ( ) ;
1187+ if !blocks[ next_idx] . except_handler && !warm[ next_idx] {
1188+ warm[ next_idx] = true ;
1189+ queue. push_back ( block. next ) ;
1190+ }
1191+ }
1192+
1193+ // Follow jump targets in instructions
1194+ for instr in & block. instructions {
1195+ if instr. target != BlockIdx :: NULL {
1196+ let target_idx = instr. target . idx ( ) ;
1197+ if !blocks[ target_idx] . except_handler && !warm[ target_idx] {
1198+ warm[ target_idx] = true ;
1199+ queue. push_back ( instr. target ) ;
1200+ }
1201+ }
1202+ }
1203+ }
1204+
1205+ // Mark non-warm blocks as cold
1206+ for ( i, block) in blocks. iter_mut ( ) . enumerate ( ) {
1207+ block. cold = !warm[ i] ;
1208+ }
1209+ }
1210+
/// Reorder the block linked list to push cold blocks to the end.
/// If a cold block falls through to a warm block, insert an explicit
/// JUMP_NO_INTERRUPT to maintain control flow.
/// flowgraph.c push_cold_blocks_to_end
///
/// After this pass the `next` chain visits all warm blocks first (in their
/// original relative order), followed by every cold streak in the order the
/// streaks appeared. Block indices themselves are never changed — only the
/// `next` links and, possibly, appended trampoline blocks.
fn push_cold_blocks_to_end(blocks: &mut Vec<Block>) {
    // Single block, nothing to reorder
    if blocks.len() <= 1 {
        return;
    }

    // Compute the `cold` flag for every block before touching the chain.
    mark_cold(blocks);

    // If a cold block falls through to a warm block, add an explicit jump.
    // Collect (cold block, warm fall-through target) pairs first so we can
    // mutate `blocks` afterwards without aliasing the iterator's borrow.
    let fixups: Vec<(BlockIdx, BlockIdx)> = iter_blocks(blocks)
        .filter(|(_, block)| {
            block.cold
                && block.next != BlockIdx::NULL
                && !blocks[block.next.idx()].cold
                // Falls through: last instruction neither exits the scope
                // nor jumps unconditionally (empty block => falls through).
                && block
                    .instructions
                    .last()
                    .map(|ins| !ins.instr.is_scope_exit() && !ins.instr.is_unconditional_jump())
                    .unwrap_or(true)
        })
        .map(|(idx, block)| (idx, block.next))
        .collect();

    for (cold_idx, warm_next) in fixups {
        // Create a new block with an explicit jump. It is appended to the
        // vec, so its index is the current length.
        let jump_block_idx = BlockIdx(blocks.len() as u32);
        // Reuse the cold block's last source location for the synthetic
        // jump so the line table stays sensible.
        let loc = blocks[cold_idx.idx()]
            .instructions
            .last()
            .map(|i| i.location)
            .unwrap_or_default();
        let end_loc = blocks[cold_idx.idx()]
            .instructions
            .last()
            .map(|i| i.end_location)
            .unwrap_or_default();
        let mut jump_block = Block::default();
        // The trampoline is cold too, so streak extraction below moves it
        // together with the block it serves.
        jump_block.cold = true;
        jump_block.instructions.push(InstructionInfo {
            instr: PseudoInstruction::JumpNoInterrupt {
                target: Arg::marker(),
            }
            .into(),
            arg: OpArg::new(0),
            target: warm_next,
            location: loc,
            end_location: end_loc,
            except_handler: None,
            lineno_override: None,
        });
        // Splice the trampoline between the cold block and its old `next`.
        jump_block.next = blocks[cold_idx.idx()].next;
        blocks[cold_idx.idx()].next = jump_block_idx;
        blocks.push(jump_block);
    }

    // Now reorder: extract cold block streaks and append at the end.
    // `cold_head`/`cold_tail` form a temporary list of removed streaks.
    let mut cold_head: BlockIdx = BlockIdx::NULL;
    let mut cold_tail: BlockIdx = BlockIdx::NULL;

    // Walk the chain, collect and remove cold blocks
    let mut current = BlockIdx(0);
    // Entry block should never be cold (mark_cold seeds it as warm)
    assert!(!blocks[0].cold);

    while current != BlockIdx::NULL {
        let next = blocks[current.idx()].next;
        if next == BlockIdx::NULL {
            break;
        }

        if blocks[next.idx()].cold {
            // Start of a cold streak: advance `cold_end` to the last
            // consecutive cold block on the chain.
            let cold_start = next;
            let mut cold_end = next;
            while blocks[cold_end.idx()].next != BlockIdx::NULL
                && blocks[blocks[cold_end.idx()].next.idx()].cold
            {
                cold_end = blocks[cold_end.idx()].next;
            }

            // Unlink cold streak from main chain
            let after_cold = blocks[cold_end.idx()].next;
            blocks[current.idx()].next = after_cold;
            blocks[cold_end.idx()].next = BlockIdx::NULL;

            // Append to cold list
            if cold_head == BlockIdx::NULL {
                cold_head = cold_start;
            } else {
                blocks[cold_tail.idx()].next = cold_start;
            }
            cold_tail = cold_end;

            // Don't advance current - check the new next (the block that
            // followed the streak may itself start another cold streak)
        } else {
            current = next;
        }
    }

    // Append cold blocks at the end of main chain.
    // NOTE(review): this relies on warm code never falling through into a
    // cold block (cold blocks are exception-table-only entries) — the fixup
    // pass above only patches cold->warm fall-throughs. Presumably
    // guaranteed upstream; confirm against mark_cold's invariants.
    if cold_head != BlockIdx::NULL {
        // Find end of main chain (`current` stopped at or before it)
        let mut last = current;
        while blocks[last.idx()].next != BlockIdx::NULL {
            last = blocks[last.idx()].next;
        }
        blocks[last.idx()].next = cold_head;
    }
}
1324+
1325+ /// Returns true if the instruction is a conditional jump (POP_JUMP_IF_*).
1326+ fn is_conditional_jump ( instr : & AnyInstruction ) -> bool {
1327+ matches ! (
1328+ instr. real( ) ,
1329+ Some (
1330+ Instruction :: PopJumpIfFalse { .. }
1331+ | Instruction :: PopJumpIfTrue { .. }
1332+ | Instruction :: PopJumpIfNone { .. }
1333+ | Instruction :: PopJumpIfNotNone { .. }
1334+ )
1335+ )
1336+ }
1337+
1338+ /// Remove redundant unconditional jumps and add NOT_TAKEN after forward
1339+ /// conditional jumps (scanning all instructions in each block).
1340+ /// flowgraph.c normalize_jumps + remove_redundant_jumps
1341+ fn normalize_jumps ( blocks : & mut Vec < Block > ) {
1342+ // Walk linked list to determine visit order (for forward/backward detection)
1343+ let mut visit_order = Vec :: new ( ) ;
1344+ let mut visited = vec ! [ false ; blocks. len( ) ] ;
1345+ let mut current = BlockIdx ( 0 ) ;
1346+ while current != BlockIdx :: NULL {
1347+ visit_order. push ( current) ;
1348+ visited[ current. idx ( ) ] = true ;
1349+ current = blocks[ current. idx ( ) ] . next ;
1350+ }
1351+
1352+ // Reset visited for forward/backward detection during second pass
1353+ visited. fill ( false ) ;
1354+
1355+ for block_idx in visit_order {
1356+ let idx = block_idx. idx ( ) ;
1357+ visited[ idx] = true ;
1358+
1359+ // Remove redundant unconditional jump to next block
1360+ let next = blocks[ idx] . next ;
1361+ if next != BlockIdx :: NULL {
1362+ let last = blocks[ idx] . instructions . last ( ) ;
1363+ let is_jump_to_next = last. is_some_and ( |ins| {
1364+ ins. instr . is_unconditional_jump ( )
1365+ && ins. target != BlockIdx :: NULL
1366+ && ins. target == next
1367+ } ) ;
1368+ if is_jump_to_next {
1369+ if let Some ( last_instr) = blocks[ idx] . instructions . last_mut ( ) {
1370+ last_instr. instr = Instruction :: Nop . into ( ) ;
1371+ last_instr. target = BlockIdx :: NULL ;
1372+ }
1373+ }
1374+ }
1375+
1376+ // Collect positions where NOT_TAKEN should be inserted
1377+ let mut insert_positions: Vec < ( usize , InstructionInfo ) > = Vec :: new ( ) ;
1378+ for ( i, ins) in blocks[ idx] . instructions . iter ( ) . enumerate ( ) {
1379+ if is_conditional_jump ( & ins. instr )
1380+ && ins. target != BlockIdx :: NULL
1381+ && !visited[ ins. target . idx ( ) ]
1382+ {
1383+ insert_positions. push ( (
1384+ i + 1 ,
1385+ InstructionInfo {
1386+ instr : Instruction :: NotTaken . into ( ) ,
1387+ arg : OpArg :: new ( 0 ) ,
1388+ target : BlockIdx :: NULL ,
1389+ location : ins. location ,
1390+ end_location : ins. end_location ,
1391+ except_handler : ins. except_handler ,
1392+ lineno_override : None ,
1393+ } ,
1394+ ) ) ;
1395+ }
1396+ }
1397+
1398+ // Insert NOT_TAKEN in reverse order to preserve indices
1399+ for ( pos, info) in insert_positions. into_iter ( ) . rev ( ) {
1400+ blocks[ idx] . instructions . insert ( pos, info) ;
1401+ }
1402+ }
1403+ }
1404+
11571405/// Label exception targets: walk CFG with except stack, set per-instruction
11581406/// handler info and block preserve_lasti flag. Converts POP_BLOCK to NOP.
11591407/// flowgraph.c label_exception_targets + push_except_block
0 commit comments