/* CPU mode switching
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "tm_p.h"
#include "function.h"
#include "tree-pass.h"
#include "timevar.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

#ifdef OPTIMIZE_MODE_SWITCHING

/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a list
   for each basic block.  For each entity, there is an array of bb_info over
   the flow graph basic blocks (local var 'bb_info'); each element contains a
   list of all insns within the corresponding basic block, in the order they
   are encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry, without a mode.  (Each basic block
   in the flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.

   More details are located in the code for optimize_mode_switching().  */

/* This structure contains the information for each insn which requires
   a specific mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.
   REGS_LIVE is the set of hard registers live at the point where the
   mode set is to be emitted.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

/* Per-basic-block, per-entity information: the list of mode segments in
   the block and the mode the block is in at its end.  */
struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};

/* These bitmaps are used for the LCM algorithm.  */
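/* For each entity J and basic block B:
   TRANSP[B] has bit J set if no insn in B requires a specific mode of
	     entity J, so a mode set may move freely through the block;
   ANTIC[B]  has bit J set if the mode currently being placed is needed
	     by the first mode-requiring insn in B;
   COMP[B]   has bit J set if B computes (ends in) the mode currently
	     being placed.  */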
static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET);
static void reg_becomes_live (rtx, rtx, void *);
static void make_preds_opaque (basic_block, int);


/* Allocate a new SEGINFO structure, initialized with the MODE, INSN,
   and basic block BB parameters.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;
  ptr = XNEW (struct seginfo);
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}

/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
	ptr = ptr->next;
      ptr->next = info;
    }
}

/* Make all predecessors of basic block B opaque, recursively, till we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->index], j))
	continue;

      RESET_BIT (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}

/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET live)
{
  int regno, nregs;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      CLEAR_HARD_REG_BIT (live, regno + nregs);
}

/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno, nregs;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs);
}

/* Make sure that if MODE_ENTRY is defined, MODE_EXIT is defined too,
   and vice versa.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
 #error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
/* Split the fallthrough edge to the exit block, so that we can note
   that NORMAL_MODE is required there.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */
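/* N_ENTITIES is the number of entities for which mode switching is being
   optimized, ENTITY_MAP maps each index in [0, N_ENTITIES) to the entity
   number, and NUM_MODES[E] is the number of modes (and therefore the
   "no mode" value) for entity E.  */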
static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	regset live_at_end = src_bb->il.rtl->global_live_at_end;
	rtx last_insn, ret_reg;

	gcc_assert (!pre_exit);
	/* If this function returns a value at the end, we have to
	   insert the final mode switch before the return value copy
	   to its hard register.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
	    && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
	    int ret_end = ret_start + nregs;
	    int short_block = 0;
	    int maybe_builtin_apply = 0;
	    int forced_late_switch = 0;
	    rtx before_return_copy;

	    do
	      {
		rtx return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (INSN_P (return_copy))
		  {
		    if (GET_CODE (PATTERN (return_copy)) == USE
			&& GET_CODE (XEXP (PATTERN (return_copy), 0)) == REG
			&& (FUNCTION_VALUE_REGNO_P
			    (REGNO (XEXP (PATTERN (return_copy), 0)))))
		      {
			maybe_builtin_apply = 1;
			last_insn = return_copy;
			continue;
		      }
		    /* If the return register is not (in its entirety)
		       likely spilled, the return copy might be
		       partially or completely optimized away.  */
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      break;
		    if (copy_start >= FIRST_PSEUDO_REGISTER)
		      break;
		    copy_num
		      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If the return register is not likely spilled - as is
		       the case for floating point on SH4 - then it might
		       be set by an arithmetic operation that needs a
		       different mode than the exit block.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode = MODE_NEEDED (e, return_copy);

			if (mode != num_modes[e] && mode != MODE_EXIT (e))
			  break;
		      }
		    if (j >= 0)
		      {
			/* For the SH4, floating point loads depend on fpscr,
			   thus we might need to put the final mode switch
			   after the return value copy.  That is still OK,
			   because a floating point return value does not
			   conflict with address reloads.  */
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = 1;
			break;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      nregs -= copy_num;
		    else if (!maybe_builtin_apply
			     || !FUNCTION_VALUE_REGNO_P (copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* ??? Exception handling can lead to the return value
		   copy being already separated from the return value use,
		   as in unwind-dw2.c .
		   Similarly, conditionally returning without a value,
		   and conditionally using builtin_return can lead to an
		   isolated use.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = 1;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);

	    /* If we didn't see a full return value copy, verify that there
	       is a plausible reason for this.  If some, but not all of the
	       return register is likely spilled, we can expect that there
	       is a copy for the likely spilled part.  */
	    gcc_assert (!nregs
			|| forced_late_switch
			|| short_block
			|| !(CLASS_LIKELY_SPILLED_P
			     (REGNO_REG_CLASS (ret_start)))
			|| (nregs
			    != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
			/* For multi-hard-register floating point
			   values, sometimes the likely-spilled part
			   is ordinarily copied first, then the other
			   part is set with an arithmetic operation.
			   This doesn't actually cause reload
			   failures, so let it pass.  */
			|| (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
			    && nregs != 1));

	    if (INSN_P (last_insn))
	      {
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		/* Instructions preceding LAST_INSN in the same block might
		   require a different mode than MODE_EXIT, so if we might
		   have such instructions, keep them in a separate block
		   from pre_exit.  */
		if (last_insn != BB_HEAD (src_bb))
		  src_bb = split_block (src_bb,
					PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    pre_exit = split_edge (eg);
	    COPY_REG_SET (pre_exit->il.rtl->global_live_at_start, live_at_end);
	    COPY_REG_SET (pre_exit->il.rtl->global_live_at_end, live_at_end);
	  }
      }

  return pre_exit;
}
#endif

/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

static int
optimize_mode_switching (void)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emited = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  clear_bb_flags ();

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If NORMAL_MODE is defined, allow for two extra
	   blocks split from the entry and exit block.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
	entry_exit_extra = 3;
#endif
	bb_info[n_entities]
	  = XCNEWVEC (struct bb_info, last_basic_block + entry_exit_extra);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the edge from the entry block, so that we can note that
     NORMAL_MODE is supplied there.  */
  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine the mode needed by the first use (if any) of entity E.
	 This will be the mode that is anticipatable for this block.
	 Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  HARD_REG_SET live_now;

	  REG_SET_TO_HARD_REG_SET (live_now,
				   bb->il.rtl->global_live_at_start);

	  /* Pretend the mode is clobbered across abnormal edges.  */
	  {
	    edge_iterator ei;
	    edge e;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      if (e->flags & EDGE_COMPLEX)
		break;
	    if (e)
	      {
		ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
		add_seginfo (info + bb->index, ptr);
		RESET_BIT (transp[bb->index], j);
	      }
	  }

	  for (insn = BB_HEAD (bb);
	       insn != NULL && insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  int mode = MODE_NEEDED (e, insn);
		  rtx link;

		  if (mode != no_mode && mode != last_mode)
		    {
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      RESET_BIT (transp[bb->index], j);
		    }
#ifdef MODE_AFTER
		  last_mode = MODE_AFTER (last_mode, insn);
#endif
		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.  */
	  if (last_mode == no_mode)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	    }
	}
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
	int mode = MODE_ENTRY (e);

	if (mode != no_mode)
	  {
	    bb = post_entry;

	    /* By always making this nontransparent, we save
	       an extra check in make_preds_opaque.  We also
	       need this to avoid confusing pre_edge_lcm when
	       antic is cleared but transp and comp are set.  */
	    RESET_BIT (transp[bb->index], j);

	    /* Insert a fake computing definition of MODE into entry
	       blocks which compute no mode.  This represents the mode on
	       entry.  */
	    info[bb->index].computing = mode;

	    if (pre_exit)
	      info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
	  }
      }
#endif /* MODE_ENTRY && MODE_EXIT */
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *delete;
      sbitmap *insert;

      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
	{
	  int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
	  struct bb_info *info = bb_info[j];

	  FOR_EACH_BB (bb)
	    {
	      if (info[bb->index].seginfo->mode == m)
		SET_BIT (antic[bb->index], j);

	      if (info[bb->index].computing == m)
		SET_BIT (comp[bb->index], j);
	    }
	}

      /* Calculate the optimal locations for placing
	 mode switches to modes with priority I.  */

      FOR_EACH_BB (bb)
	sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
				kill, &insert, &delete);

      for (j = n_entities - 1; j >= 0; j--)
	{
	  /* Insert all mode sets that have been inserted by lcm.  */
	  int no_mode = num_modes[entity_map[j]];

	  /* Wherever we have moved a mode setting upwards in the flow graph,
	     the blocks between the new setting site and the now redundant
	     computation cease to be transparent for any lower-priority
	     mode of the same entity.  First mark each insertion site edge
	     by setting its aux field, then propagate the new
	     non-transparency from the redundant computation upwards till
	     we hit an insertion site or an already non-transparent block.  */
	  for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
	    {
	      edge eg = INDEX_EDGE (edge_list, e);
	      int mode;
	      basic_block src_bb;
	      HARD_REG_SET live_at_edge;
	      rtx mode_set;

	      eg->aux = 0;

	      if (! TEST_BIT (insert[e], j))
		continue;

	      eg->aux = (void *)1;

	      mode = current_mode[j];
	      src_bb = eg->src;

	      REG_SET_TO_HARD_REG_SET (live_at_edge,
				       src_bb->il.rtl->global_live_at_end);

	      start_sequence ();
	      EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
	      mode_set = get_insns ();
	      end_sequence ();

	      /* Do not bother to insert an empty sequence.  */
	      if (mode_set == NULL_RTX)
		continue;

	      /* We should not get an abnormal edge here.  */
	      gcc_assert (! (eg->flags & EDGE_ABNORMAL));

	      need_commit = 1;
	      insert_insn_on_edge (mode_set, eg);
	    }

	  FOR_EACH_BB_REVERSE (bb)
	    if (TEST_BIT (delete[bb->index], j))
	      {
		make_preds_opaque (bb, j);
		/* Cancel the 'deleted' mode set.  */
		bb_info[j][bb->index].seginfo->mode = no_mode;
	      }
	}

      sbitmap_vector_free (delete);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
	{
	  struct seginfo *ptr, *next;
	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx mode_set;

		  start_sequence ();
		  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Insert MODE_SET only if it is nonempty.  */
		  if (mode_set != NULL_RTX)
		    {
		      emited = true;
		      if (NOTE_P (ptr->insn_ptr)
			  && (NOTE_LINE_NUMBER (ptr->insn_ptr)
			      == NOTE_INSN_BASIC_BLOCK))
			emit_insn_after (mode_set, ptr->insn_ptr);
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emited)
    return 0;
#endif

  max_regno = max_reg_num ();
  allocate_reg_info (max_regno, FALSE, FALSE);
  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE
				     | PROP_SCAN_DEAD_CODE));

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */

static bool
gate_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  return true;
#else
  return false;
#endif
}

static unsigned int
rest_of_handle_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  no_new_pseudos = 0;
  optimize_mode_switching ();
  no_new_pseudos = 1;
#endif /* OPTIMIZE_MODE_SWITCHING */
  return 0;
}


struct tree_opt_pass pass_mode_switching =
{
  "mode-sw",				/* name */
  gate_mode_switching,			/* gate */
  rest_of_handle_mode_switching,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_MODE_SWITCH,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};