flow.c revision 72562
1/* Data flow analysis for GNU compiler. 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 3 1999, 2000 Free Software Foundation, Inc. 4 5This file is part of GNU CC. 6 7GNU CC is free software; you can redistribute it and/or modify 8it under the terms of the GNU General Public License as published by 9the Free Software Foundation; either version 2, or (at your option) 10any later version. 11 12GNU CC is distributed in the hope that it will be useful, 13but WITHOUT ANY WARRANTY; without even the implied warranty of 14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15GNU General Public License for more details. 16 17You should have received a copy of the GNU General Public License 18along with GNU CC; see the file COPYING. If not, write to 19the Free Software Foundation, 59 Temple Place - Suite 330, 20Boston, MA 02111-1307, USA. */ 21 22 23/* This file contains the data flow analysis pass of the compiler. It 24 computes data flow information which tells combine_instructions 25 which insns to consider combining and controls register allocation. 26 27 Additional data flow information that is too bulky to record is 28 generated during the analysis, and is used at that time to create 29 autoincrement and autodecrement addressing. 30 31 The first step is dividing the function into basic blocks. 32 find_basic_blocks does this. Then life_analysis determines 33 where each register is live and where it is dead. 34 35 ** find_basic_blocks ** 36 37 find_basic_blocks divides the current function's rtl into basic 38 blocks and constructs the CFG. The blocks are recorded in the 39 basic_block_info array; the CFG exists in the edge structures 40 referenced by the blocks. 41 42 find_basic_blocks also finds any unreachable loops and deletes them. 43 44 ** life_analysis ** 45 46 life_analysis is called immediately after find_basic_blocks. 47 It uses the basic block information to determine where each 48 hard or pseudo register is live. 
49 50 ** live-register info ** 51 52 The information about where each register is live is in two parts: 53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start. 54 55 basic_block->global_live_at_start has an element for each basic 56 block, and the element is a bit-vector with a bit for each hard or 57 pseudo register. The bit is 1 if the register is live at the 58 beginning of the basic block. 59 60 Two types of elements can be added to an insn's REG_NOTES. 61 A REG_DEAD note is added to an insn's REG_NOTES for any register 62 that meets both of two conditions: The value in the register is not 63 needed in subsequent insns and the insn does not replace the value in 64 the register (in the case of multi-word hard registers, the value in 65 each register must be replaced by the insn to avoid a REG_DEAD note). 66 67 In the vast majority of cases, an object in a REG_DEAD note will be 68 used somewhere in the insn. The (rare) exception to this is if an 69 insn uses a multi-word hard register and only some of the registers are 70 needed in subsequent insns. In that case, REG_DEAD notes will be 71 provided for those hard registers that are not subsequently needed. 72 Partial REG_DEAD notes of this type do not occur when an insn sets 73 only some of the hard registers used in such a multi-word operand; 74 omitting REG_DEAD notes for objects stored in an insn is optional and 75 the desire to do so does not justify the complexity of the partial 76 REG_DEAD notes. 77 78 REG_UNUSED notes are added for each register that is set by the insn 79 but is unused subsequently (if every register set by the insn is unused 80 and the insn does not reference memory or have some other side-effect, 81 the insn is deleted instead). If only part of a multi-word hard 82 register is used in a subsequent insn, REG_UNUSED notes are made for 83 the parts that will not be used. 
84 85 To determine which registers are live after any insn, one can 86 start from the beginning of the basic block and scan insns, noting 87 which registers are set by each insn and which die there. 88 89 ** Other actions of life_analysis ** 90 91 life_analysis sets up the LOG_LINKS fields of insns because the 92 information needed to do so is readily available. 93 94 life_analysis deletes insns whose only effect is to store a value 95 that is never used. 96 97 life_analysis notices cases where a reference to a register as 98 a memory address can be combined with a preceding or following 99 incrementation or decrementation of the register. The separate 100 instruction to increment or decrement is deleted and the address 101 is changed to a POST_INC or similar rtx. 102 103 Each time an incrementing or decrementing address is created, 104 a REG_INC element is added to the insn's REG_NOTES list. 105 106 life_analysis fills in certain vectors containing information about 107 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length, 108 reg_n_calls_crosses and reg_basic_block. 109 110 life_analysis sets current_function_sp_is_unchanging if the function 111 doesn't modify the stack pointer. */ 112 113/* TODO: 114 115 Split out from life_analysis: 116 - local property discovery (bb->local_live, bb->local_set) 117 - global property computation 118 - log links creation 119 - pre/post modify transformation 120*/ 121 122#include "config.h" 123#include "system.h" 124#include "rtl.h" 125#include "basic-block.h" 126#include "insn-config.h" 127#include "regs.h" 128#include "hard-reg-set.h" 129#include "flags.h" 130#include "output.h" 131#include "except.h" 132#include "toplev.h" 133#include "recog.h" 134#include "insn-flags.h" 135 136#include "obstack.h" 137#define obstack_chunk_alloc xmalloc 138#define obstack_chunk_free free 139 140 141/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function, 142 the stack pointer does not matter. 
The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif


/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* List of labels that must never be deleted.  */
extern rtx forced_labels;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* The basic block array.  Indexed 0 .. n_basic_blocks-1 via the
   BASIC_BLOCK macro.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  These are statically allocated
   (never on the function obstack) and sit outside basic_block_info;
   they are reached through ENTRY_BLOCK_PTR / EXIT_BLOCK_PTR.  All
   pointer members start out null and are filled in lazily.  */

struct basic_block_def entry_exit_blocks[2] =
{
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0				/* loop_depth */
  },
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0				/* loop_depth */
  }
};

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;

/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.
 */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;

/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.

   ?!? Note we leak memory by not free-ing items on this list.  We need to
   write some generic routines to operate on memory lists since cse, gcse,
   loop, sched, flow and possibly other passes all need to do basically the
   same operations on these lists.  */

static rtx mem_set_list;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;

/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile.
   Backed by the UID_VOLATILE bitmap, indexed by INSN_UID.  */

#define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
#define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
static bitmap uid_volatile;

/* Forward declarations */
static int count_basic_blocks		PROTO((rtx));
static rtx find_basic_blocks_1		PROTO((rtx, rtx*));
static void create_basic_block		PROTO((int, rtx, rtx, rtx));
static void compute_bb_for_insn		PROTO((varray_type, int));
static void clear_edges			PROTO((void));
static void make_edges			PROTO((rtx, rtx*));
static void make_edge			PROTO((basic_block, basic_block, int));
static void make_label_edge		PROTO((basic_block, rtx, int));
static void mark_critical_edges		PROTO((void));

static void commit_one_edge_insertion	PROTO((edge));

static void delete_unreachable_blocks	PROTO((void));
static void delete_eh_regions		PROTO((void));
static int can_delete_note_p		PROTO((rtx));
static void delete_insn_chain		PROTO((rtx, rtx));
static int delete_block			PROTO((basic_block));
static void expunge_block		PROTO((basic_block));
static rtx flow_delete_insn		PROTO((rtx));
static int can_delete_label_p		PROTO((rtx));
static void merge_blocks_nomove		PROTO((basic_block, basic_block));
static int merge_blocks			PROTO((edge,basic_block,basic_block));
static void tidy_fallthru_edge		PROTO((edge,basic_block,basic_block));
static void calculate_loop_depth	PROTO((rtx));

static int set_noop_p			PROTO((rtx));
static int noop_move_p			PROTO((rtx));
static void notice_stack_pointer_modification PROTO ((rtx, rtx));
static void record_volatile_insns	PROTO((rtx));
static void mark_regs_live_at_end	PROTO((regset));
static void life_analysis_1		PROTO((rtx, int, int));
static void init_regset_vector		PROTO ((regset *, int,
						struct obstack *));
static void propagate_block		PROTO((regset, rtx, rtx, int,
					       regset, int, int));
static int insn_dead_p			PROTO((rtx, regset, int, rtx));
static int
libcall_dead_p PROTO((rtx, regset, rtx, rtx));
static void mark_set_regs		PROTO((regset, regset, rtx,
					       rtx, regset));
static void mark_set_1			PROTO((regset, regset, rtx,
					       rtx, regset));
#ifdef AUTO_INC_DEC
static void find_auto_inc		PROTO((regset, rtx, rtx));
static int try_pre_increment_1		PROTO((rtx));
static int try_pre_increment		PROTO((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_regs		PROTO((regset, regset, rtx, int, rtx));
void dump_flow_info			PROTO((FILE *));
static void dump_edge_info		PROTO((FILE *, edge, int));

static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
static int_list_ptr add_int_list_node	PROTO ((int_list_block **,
						int_list **, int));

static void add_pred_succ	PROTO ((int, int, int_list_ptr *,
					int_list_ptr *, int *, int *));

static void count_reg_sets_1	PROTO ((rtx));
static void count_reg_sets	PROTO ((rtx));
static void count_reg_references PROTO ((rtx));
/* NOTE(review): this declaration of notice_stack_pointer_modification
   duplicates the identical one earlier in this list; it is redundant but
   harmless.  */
static void notice_stack_pointer_modification PROTO ((rtx, rtx));
static void invalidate_mems_from_autoinc PROTO ((rtx));
void verify_flow_info PROTO ((void));

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.
   FILE is accepted but unused here (ATTRIBUTE_UNUSED); DO_CLEANUP
   nonzero requests deletion of unreachable blocks once the edges have
   been built.  */

void
find_basic_blocks (f, nregs, file, do_cleanup)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
     int do_cleanup;
{
  rtx *bb_eh_end;
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  /* An array to record the active exception region at the end of each
     basic block.  It is filled in by find_basic_blocks_1 for make_edges.
     alloca is safe here: n_basic_blocks pointers, freed on return.  */
  bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));

  label_value_list = find_basic_blocks_1 (f, bb_eh_end);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
  compute_bb_for_insn (basic_block_for_insn, max_uid);

  /* Discover the edges of our cfg.  */

  make_edges (label_value_list, bb_eh_end);

  /* Delete unreachable blocks.  */

  if (do_cleanup)
    delete_unreachable_blocks ();

  /* Mark critical edges.  */

  mark_critical_edges ();

  /* Discover the loop depth at the start of each basic block to aid
     register allocation.  */
  calculate_loop_depth (f);

  /* Kill the data we won't maintain.  */
  label_value_list = 0;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Count the basic blocks of the function.  Also emits a nop USE after
   any call that does not create an abnormal edge, so the call is never
   the last insn of its block (see comment in the body).  The count
   returned must match what find_basic_blocks_1 later creates.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  /* Seed with JUMP_INSN so the first real insn begins a block.  */
  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      /* A new block starts at a label, or at the first insn after a
	 jump, a barrier, or a call that had an abnormal edge.  */
      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	{
	  count++;

	  /* If the previous insn was a call that did not create an
	     abnormal edge, we want to add a nop so that the CALL_INSN
	     itself is not at basic_block_end.  This allows us to
	     easily distinguish between normal calls and those which
	     create abnormal edges in the flow graph.  */

	  if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      emit_insn_after (nop, prev_call);
	    }
	}

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
	  prev_call = insn;
	  call_had_abnormal_edge = 0;

	  /* If there is a specified EH region, we have an edge.  */
	  if (eh_region && region > 0)
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).
 */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      /* Track EH region nesting; notes themselves never end a block.  */
      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;

    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}

/* Find all basic blocks of the function whose first insn is F.
   Store the correct data in the tables that describe the basic blocks,
   set up the chains of references for each CODE_LABEL, and
   delete any entire basic blocks that cannot be reached.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.
   (NOTE(review): the paragraph above describes a parameter this function
   no longer takes; the current parameters are F and BB_EH_END only.)

   BB_EH_END is an array in which we record the list of exception regions
   active at the end of every basic block.

   Returns the list of labels referenced by non-jump insns
   (label_value_list), for use by make_edges.  */

static rtx
find_basic_blocks_1 (f, bb_eh_end)
     rtx f;
     rtx *bb_eh_end;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;			/* index of the block being created */
  rtx bb_note = NULL_RTX;	/* pending NOTE_INSN_BASIC_BLOCK, if any */
  rtx eh_list = NULL_RTX;	/* LIFO list of active EH region notes */
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;		/* first insn of the block in progress */
  rtx end = NULL_RTX;		/* last insn of the block in progress */

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     find_basic_blocks_0 continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  Mirrors the
	     logic in count_basic_blocks.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XINT (XEXP (note, 0), 0) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region, we have an edge.  */
	  if (eh_list && region > 0)
	    call_has_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_has_abnormal_edge = 1;
	    }
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      eh_list = XEXP (eh_list, 1);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN)
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      bb_eh_end[i] = eh_list;
	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  /* Undo the block-in-progress and the block counted for
		     this label/table pair in count_basic_blocks.  */
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN)
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      bb_eh_end[i] = eh_list;
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		/* NOTE(review): this NEXT intentionally shadows the outer
		   loop's NEXT; it is only used within this statement.  */
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
					 label_value_list);
	      }
	}
    }

  /* Close out any block still in progress at end of function.  */
  if (head != NULL_RTX)
    {
      bb_eh_end[i] = eh_list;
      create_basic_block (i++, head, end, bb_note);
    }

  /* The count from count_basic_blocks must agree exactly.  */
  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}

/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

static void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && !
RTX_INTEGRATED_P (bb_note) 751 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL 752 && bb->aux == NULL) 753 { 754 /* If we found an existing note, thread it back onto the chain. */ 755 756 if (GET_CODE (head) == CODE_LABEL) 757 add_insn_after (bb_note, head); 758 else 759 { 760 add_insn_before (bb_note, head); 761 head = bb_note; 762 } 763 } 764 else 765 { 766 /* Otherwise we must create a note and a basic block structure. 767 Since we allow basic block structs in rtl, give the struct 768 the same lifetime by allocating it off the function obstack 769 rather than using malloc. */ 770 771 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb)); 772 memset (bb, 0, sizeof (*bb)); 773 774 if (GET_CODE (head) == CODE_LABEL) 775 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head); 776 else 777 { 778 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head); 779 head = bb_note; 780 } 781 NOTE_BASIC_BLOCK (bb_note) = bb; 782 } 783 784 /* Always include the bb note in the block. */ 785 if (NEXT_INSN (end) == bb_note) 786 end = bb_note; 787 788 bb->head = head; 789 bb->end = end; 790 bb->index = index; 791 BASIC_BLOCK (index) = bb; 792 793 /* Tag the block so that we know it has been used when considering 794 other basic block notes. */ 795 bb->aux = bb; 796} 797 798/* Records the basic block struct in BB_FOR_INSN, for every instruction 799 indexed by INSN_UID. MAX is the size of the array. */ 800 801static void 802compute_bb_for_insn (bb_for_insn, max) 803 varray_type bb_for_insn; 804 int max; 805{ 806 int i; 807 808 for (i = 0; i < n_basic_blocks; ++i) 809 { 810 basic_block bb = BASIC_BLOCK (i); 811 rtx insn, end; 812 813 end = bb->end; 814 insn = bb->head; 815 while (1) 816 { 817 int uid = INSN_UID (insn); 818 if (uid < max) 819 VARRAY_BB (bb_for_insn, uid) = bb; 820 if (insn == end) 821 break; 822 insn = NEXT_INSN (insn); 823 } 824 } 825} 826 827/* Free the memory associated with the edge structures. 
*/ 828 829static void 830clear_edges () 831{ 832 int i; 833 edge n, e; 834 835 for (i = 0; i < n_basic_blocks; ++i) 836 { 837 basic_block bb = BASIC_BLOCK (i); 838 839 for (e = bb->succ; e ; e = n) 840 { 841 n = e->succ_next; 842 free (e); 843 } 844 845 bb->succ = 0; 846 bb->pred = 0; 847 } 848 849 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n) 850 { 851 n = e->succ_next; 852 free (e); 853 } 854 855 ENTRY_BLOCK_PTR->succ = 0; 856 EXIT_BLOCK_PTR->pred = 0; 857} 858 859/* Identify the edges between basic blocks. 860 861 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks 862 that are otherwise unreachable may be reachable with a non-local goto. 863 864 BB_EH_END is an array indexed by basic block number in which we record 865 the list of exception regions active at the end of the basic block. */ 866 867static void 868make_edges (label_value_list, bb_eh_end) 869 rtx label_value_list; 870 rtx *bb_eh_end; 871{ 872 int i; 873 874 /* Assume no computed jump; revise as we create edges. */ 875 current_function_has_computed_jump = 0; 876 877 /* By nature of the way these get numbered, block 0 is always the entry. */ 878 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU); 879 880 for (i = 0; i < n_basic_blocks; ++i) 881 { 882 basic_block bb = BASIC_BLOCK (i); 883 rtx insn, x, eh_list; 884 enum rtx_code code; 885 int force_fallthru = 0; 886 887 /* If we have asynchronous exceptions, scan the notes for all exception 888 regions active in the block. In the normal case, we only need the 889 one active at the end of the block, which is bb_eh_end[i]. 
*/ 890 891 eh_list = bb_eh_end[i]; 892 if (asynchronous_exceptions) 893 { 894 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn)) 895 { 896 if (GET_CODE (insn) == NOTE 897 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END) 898 eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list); 899 } 900 } 901 902 /* Now examine the last instruction of the block, and discover the 903 ways we can leave the block. */ 904 905 insn = bb->end; 906 code = GET_CODE (insn); 907 908 /* A branch. */ 909 if (code == JUMP_INSN) 910 { 911 rtx tmp; 912 913 /* ??? Recognize a tablejump and do the right thing. */ 914 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX 915 && (tmp = NEXT_INSN (tmp)) != NULL_RTX 916 && GET_CODE (tmp) == JUMP_INSN 917 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC 918 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)) 919 { 920 rtvec vec; 921 int j; 922 923 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC) 924 vec = XVEC (PATTERN (tmp), 0); 925 else 926 vec = XVEC (PATTERN (tmp), 1); 927 928 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j) 929 make_label_edge (bb, XEXP (RTVEC_ELT (vec, j), 0), 0); 930 931 /* Some targets (eg, ARM) emit a conditional jump that also 932 contains the out-of-range target. Scan for these and 933 add an edge if necessary. */ 934 if ((tmp = single_set (insn)) != NULL 935 && SET_DEST (tmp) == pc_rtx 936 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE 937 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF) 938 make_label_edge (bb, XEXP (XEXP (SET_SRC (tmp), 2), 0), 0); 939 940#ifdef CASE_DROPS_THROUGH 941 /* Silly VAXen. The ADDR_VEC is going to be in the way of 942 us naturally detecting fallthru into the next block. */ 943 force_fallthru = 1; 944#endif 945 } 946 947 /* If this is a computed jump, then mark it as reaching 948 everything on the label_value_list and forced_labels list. 
*/ 949 else if (computed_jump_p (insn)) 950 { 951 current_function_has_computed_jump = 1; 952 953 for (x = label_value_list; x; x = XEXP (x, 1)) 954 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL); 955 956 for (x = forced_labels; x; x = XEXP (x, 1)) 957 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL); 958 } 959 960 /* Returns create an exit out. */ 961 else if (returnjump_p (insn)) 962 make_edge (bb, EXIT_BLOCK_PTR, 0); 963 964 /* Otherwise, we have a plain conditional or unconditional jump. */ 965 else 966 { 967 if (! JUMP_LABEL (insn)) 968 abort (); 969 make_label_edge (bb, JUMP_LABEL (insn), 0); 970 } 971 } 972 973 /* If this is a CALL_INSN, then mark it as reaching the active EH 974 handler for this CALL_INSN. If we're handling asynchronous 975 exceptions then any insn can reach any of the active handlers. 976 977 Also mark the CALL_INSN as reaching any nonlocal goto handler. */ 978 979 if (code == CALL_INSN || asynchronous_exceptions) 980 { 981 int is_call = (code == CALL_INSN ? EDGE_ABNORMAL_CALL : 0); 982 handler_info *ptr; 983 984 /* Use REG_EH_RETHROW and REG_EH_REGION if available. */ 985 /* ??? REG_EH_REGION is not generated presently. Is it 986 inteded that there be multiple notes for the regions? 987 or is my eh_list collection redundant with handler linking? 
*/ 988 989 x = find_reg_note (insn, REG_EH_RETHROW, 0); 990 if (!x) 991 x = find_reg_note (insn, REG_EH_REGION, 0); 992 if (x) 993 { 994 if (XINT (XEXP (x, 0), 0) > 0) 995 { 996 ptr = get_first_handler (XINT (XEXP (x, 0), 0)); 997 while (ptr) 998 { 999 make_label_edge (bb, ptr->handler_label, 1000 EDGE_ABNORMAL | EDGE_EH | is_call); 1001 ptr = ptr->next; 1002 } 1003 } 1004 } 1005 else 1006 { 1007 for (x = eh_list; x; x = XEXP (x, 1)) 1008 { 1009 ptr = get_first_handler (NOTE_BLOCK_NUMBER (XEXP (x, 0))); 1010 while (ptr) 1011 { 1012 make_label_edge (bb, ptr->handler_label, 1013 EDGE_ABNORMAL | EDGE_EH | is_call); 1014 ptr = ptr->next; 1015 } 1016 } 1017 } 1018 1019 if (code == CALL_INSN && nonlocal_goto_handler_labels) 1020 { 1021 /* ??? This could be made smarter: in some cases it's possible 1022 to tell that certain calls will not do a nonlocal goto. 1023 1024 For example, if the nested functions that do the nonlocal 1025 gotos do not have their addresses taken, then only calls to 1026 those functions or to other nested functions that use them 1027 could possibly do nonlocal gotos. */ 1028 1029 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1)) 1030 make_label_edge (bb, XEXP (x, 0), 1031 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL); 1032 } 1033 } 1034 1035 /* We know something about the structure of the function __throw in 1036 libgcc2.c. It is the only function that ever contains eh_stub 1037 labels. It modifies its return address so that the last block 1038 returns to one of the eh_stub labels within it. So we have to 1039 make additional edges in the flow graph. */ 1040 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0) 1041 make_label_edge (bb, eh_return_stub_label, EDGE_EH); 1042 1043 /* Find out if we can drop through to the next block. 
*/
      /* The fallthru edge exists only if the first real insn of the
	 next block immediately follows this one (or a fallthru was
	 forced for the final block).  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

static void
make_edge (src, dst, flags)
     basic_block src, dst;
     int flags;
{
  edge e;

  /* Make sure we don't add duplicate edges.  If an edge to DST already
     exists, merge the new flags into it instead of making a second
     edge.  */

  for (e = src->succ; e ; e = e->succ_next)
    if (e->dest == dst)
      {
	e->flags |= flags;
	return;
      }

  e = (edge) xcalloc (1, sizeof (*e));

  /* Link the new edge at the head of both the source's successor list
     and the destination's predecessor list.  */
  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;
}

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (src, label, flags)
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (src, BLOCK_FOR_INSN (label), flags);
}

/* Identify critical edges and set the bits appropriately.
   An edge is critical iff its source has multiple successors
   and its destination has multiple predecessors.  */
static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  /* A single (or no) successor: none of the out edges can
	     be critical.  */
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}

/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.
*/
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));

  memset (bb, 0, sizeof (*bb));
  bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  CLEAR_REG_SET (bb->local_set);
  if (old_succ->global_live_at_start)
    {
      /* The new block holds no insns yet, so whatever is live on entry
	 to the old successor is live across all of it.  */
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  /* The split edge now targets the new block; with one predecessor and
     one successor it can no longer be critical.  */
  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  /* The new out edge is a fallthru from BB to the old successor.  */
  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
	         way to do the bulk of the work here is to recursively
	         call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  Shift every
     later block up one slot, renumbering as we go.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  for (i = n_basic_blocks - 1; i > old_succ->index; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.  */
  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.
*/
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  /* Retarget every table entry that referenced the old label,
	     keeping the label use counts in sync.  */
	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  /* Accumulate all insns queued on this edge into a single sequence
     hung off E->insns.  */
  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence();
}

/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, tmp;
  basic_block bb;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  /* ??? Is it possible to wind up with non-simple jumps?  Perhaps
	     a jump with delay slots already filled?  */
	  if (! simplejump_p (bb->end))
	    abort ();

	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.
*/
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */
  tmp = e->insns;
  e->insns = NULL_RTX;
  if (before)
    {
      emit_insns_before (tmp, before);
      if (before == bb->head)
	bb->head = before;
    }
  else
    {
      tmp = emit_insns_after (tmp, after);
      if (after == bb->end)
	bb->end = tmp;
    }
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

  /* Walk the entry block and then every numbered block, committing
     queued insns on each outgoing edge.  */
  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      /* Committing an insertion may split E, so fetch the successor
	 link before the call.  */
      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}

/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);

  /* Use basic_block->aux as a reachability marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  /* Fix up edges that now fall through, or rather should now fall through
     but previously required a jump around now deleted blocks.  Simplify
     the search by only examining blocks numerically adjacent, since this
     is how find_basic_blocks created them.  */

  for (i = 1; i < n_basic_blocks; ++i)
    {
      basic_block b = BASIC_BLOCK (i - 1);
      basic_block c = BASIC_BLOCK (i);
      edge s;

      /* We care about simple conditional or unconditional jumps with
	 a single successor.

	 If we had a conditional branch to the next instruction when
	 find_basic_blocks was called, then there will only be one
	 out edge for the block which ended with the conditional
	 branch (since we do not create duplicate edges).

	 Furthermore, the edge will be marked as a fallthru because we
	 merge the flags for the duplicate edges.  So we do not want to
	 check that the edge is not a FALLTHRU edge.  */
      if ((s = b->succ) != NULL
	  && s->succ_next == NULL
	  && s->dest == c
	  /* If the last insn is not a normal conditional jump
	     (or an unconditional jump), then we can not tidy the
	     fallthru edge because we can not delete the jump.  */
	  && GET_CODE (b->end) == JUMP_INSN
	  && condjump_p (b->end))
	tidy_fallthru_edge (s, b, c);
    }

  /* Attempt to merge blocks as made possible by edge removal.  If a block
     has only one successor, and the successor has only one predecessor,
     they may be combined.  */

  for (i = 0; i < n_basic_blocks; )
    {
      basic_block c, b = BASIC_BLOCK (i);
      edge s;

      /* A loop because chains of blocks might be combinable.  */
      while ((s = b->succ) != NULL
	     && s->succ_next == NULL
	     && (s->flags & EDGE_EH) == 0
	     && (c = s->dest) != EXIT_BLOCK_PTR
	     && c->pred->pred_next == NULL
	     /* If the last insn is not a normal conditional jump
		(or an unconditional jump), then we can not merge
		the blocks because we can not delete the jump.  */
	     && GET_CODE (b->end) == JUMP_INSN
	     && condjump_p (b->end)
	     && merge_blocks (s, b, c))
	continue;

      /* Don't get confused by the index shift caused by deleting blocks.  */
      i = b->index + 1;
    }

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();
}

/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = CODE_LABEL_NUMBER (insn);
	    /* A NULL handler indicates a region is no longer needed.  */
	    if (get_first_handler (num) == NULL)
	      {
		/* Turn the region note into a deleted note in place.  */
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.
*/

static int
can_delete_note_p (note)
     rtx note;
{
  /* DELETED and BASIC_BLOCK notes are ours to discard; any other kind
     of note must be kept paired.  */
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

static void
delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_EH_REGION_BEG and NOTE_EH_REGION_END
     notes.  */

  insn = b->head;

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions.  */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (b->end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  delete_insn_chain (insn, end);

no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    /* For each incoming edge, unlink it from its source's successor
       list before freeing it; likewise for each outgoing edge and its
       destination's predecessor list.  */
    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}

/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  /* Shift every later block down one slot, renumbering as we go.  */
  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}

/* Delete INSN by patching it out.  Return the next insn.  */

static rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);
  rtx note;

  /* Detach INSN from the doubly-linked insn chain.  */
  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.
*/
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
    LABEL_NUSES (XEXP (note, 0))--;

  return next;
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  /* A label cannot be deleted while something still refers to it:
     forced labels, labels whose value is taken, and exception handler
     labels.  */
  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}

/* Blocks A and B are to be merged into a single block.  The insns
   are already contiguous, hence `nomove'.  */

static void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */

      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  flow_delete_insn (tmp);
	}
#endif

      /* Note that a->head != a->end, since we should have at least a
	 bb note plus the jump, so prev != insn.  */
      flow_delete_insn (a_end);
      a_end = prev;
    }

  /* By definition, there should only be one successor of A, and that is
     B.  Free that edge struct.  */
  free (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      BLOCK_FOR_INSN (b_head) = a;
      while (b_head != b_end)
	{
	  b_head = NEXT_INSN (b_head);
	  BLOCK_FOR_INSN (b_head) = a;
	}
      a_end = b_head;
    }
  a->end = a_end;

  /* Compact the basic block array.  */
  expunge_block (b);
}

/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return true iff the attempt succeeded.  */

static int
merge_blocks (e, b, c)
     edge e;
     basic_block b, c;
{
  /* If B has a fallthru edge to C, no need to move anything.  */
  if (!(e->flags & EDGE_FALLTHRU))
    {
      /* The non-fallthru case is not yet implemented; bail out.

	 ??? From here on out we must make sure to not munge nesting
	 of exception regions and lexical blocks.  Need to think about
	 these cases before this gets implemented.  */
      return 0;

      /* If C has an outgoing fallthru, and B does not have an incoming
	 fallthru, move B before C.  The later clause is somewhat arbitrary,
	 but avoids modifying blocks other than the two we've been given.  */

      /* Otherwise, move C after B.  If C had a fallthru, which doesn't
	 happen to be the physical successor to B, insert an unconditional
	 branch.  If C already ended with a conditional branch, the new
	 jump must go in a new basic block D.  */
    }

  /* If a label still appears somewhere and we cannot delete the label,
     then we cannot merge the blocks.  The edge was tidied already.  */
  {
    rtx insn, stop = NEXT_INSN (c->head);
    for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
	return 0;
  }

  merge_blocks_nomove (b, c);
  return 1;
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the unconditional jump and barriers that are in
   the way.  */

static void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
    return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN)
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (!
simplejump_p (q) && condjump_p (q))
	q = PREV_INSN (q);
#endif

      /* If the jump was the only insn in B, turn it into a deleted
	 note in place rather than emptying the block; otherwise just
	 back the block's end up past it.  */
      if (b->head == q)
	{
	  PUT_CODE (q, NOTE);
	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (q) = 0;
	}
      else
	b->end = q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (c->head))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}

/* Discover and record the loop depth at the head of each basic block.  */

static void
calculate_loop_depth (insns)
     rtx insns;
{
  basic_block bb;
  rtx insn;
  int i = 0, depth = 1;

  /* One pass over the insn stream: track LOOP_BEG/LOOP_END note
     nesting and record the current depth at each block head.  */
  bb = BASIC_BLOCK (i);
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (insn == bb->head)
	{
	  bb->loop_depth = depth;
	  if (++i >= n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}

      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	    depth++;
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	    depth--;

	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.  */
	  if (depth == 0)
	    abort ();
	}
    }
}

/* Perform data flow analysis.
   F is the first insn of the function and NREGS the number of register numbers
   in use.  */

void
life_analysis (f, nregs, file, remove_dead_code)
     rtx f;
     int nregs;
     FILE *file;
     int remove_dead_code;
{
#ifdef ELIMINABLE_REGS
  register size_t i;
  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  /* Allocate a bitmap to be filled in by record_volatile_insns.  */
  uid_volatile = BITMAP_ALLOCA ();

  /* We want alias analysis information for local dead store elimination.  */
  init_alias_analysis ();
  life_analysis_1 (f, nregs, remove_dead_code);
  end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  BITMAP_FREE (uid_volatile);
  free_basic_block_vars (1);
}

/* Free the variables allocated by find_basic_blocks.

   KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */

void
free_basic_block_vars (keep_head_end_p)
     int keep_head_end_p;
{
  if (basic_block_for_insn)
    {
      VARRAY_FREE (basic_block_for_insn);
      basic_block_for_insn = NULL;
    }

  if (! keep_head_end_p)
    {
      clear_edges ();
      VARRAY_FREE (basic_block_info);
      n_basic_blocks = 0;

      ENTRY_BLOCK_PTR->aux = NULL;
      ENTRY_BLOCK_PTR->global_live_at_end = NULL;
      EXIT_BLOCK_PTR->aux = NULL;
      EXIT_BLOCK_PTR->global_live_at_start = NULL;
    }
}

/* Return nonzero if the destination of SET equals the source.
*/

static int
set_noop_p (set)
     rtx set;
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  /* A direct register-to-register copy of the same register.  */
  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
      && REGNO (src) == REGNO (dst))
    return 1;

  /* Otherwise, both sides must be SUBREGs selecting the same word
     of the same underlying register.  */
  if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
      || SUBREG_WORD (src) != SUBREG_WORD (dst))
    return 0;
  src = SUBREG_REG (src);
  dst = SUBREG_REG (dst);
  if (GET_CODE (src) == REG && GET_CODE (dst) == REG
      && REGNO (src) == REGNO (dst))
    return 1;
  return 0;
}

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

static int
noop_move_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
	 this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx tem = XVECEXP (pat, 0, i);

	  /* USE and CLOBBER elements do not disqualify the insn.  */
	  if (GET_CODE (tem) == USE
	      || GET_CODE (tem) == CLOBBER)
	    continue;

	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
	    return 0;
	}

      return 1;
    }
  return 0;
}

/* Called via note_stores from record_volatile_insns.  If X, the
   destination of a store, is the stack pointer (or an auto-modified
   push/pop of it), note that this function changes the stack pointer.  */

static void
notice_stack_pointer_modification (x, pat)
     rtx x;
     rtx pat ATTRIBUTE_UNUSED;
{
  if (x == stack_pointer_rtx
      /* The stack pointer is only modified indirectly as the result
	 of a push until later in flow.  See the comments in rtl.texi
	 regarding Embedded Side-Effects on Addresses.  */
      || (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_DEC
	      || GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == POST_DEC
	      || GET_CODE (XEXP (x, 0)) == POST_INC)
	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
    current_function_sp_is_unchanging = 0;
}

/* Record which insns refer to any volatile memory
   or for any reason can't be deleted just because they are dead stores.
   Also, delete any insns that copy a register to itself.
   And see if the stack pointer is modified.
   F is the first insn of the function.  */

static void
record_volatile_insns (f)
     rtx f;
{
  rtx insn;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code1 = GET_CODE (insn);
      /* Calls are never deletable as dead stores.  */
      if (code1 == CALL_INSN)
	SET_INSN_VOLATILE (insn);
      else if (code1 == INSN || code1 == JUMP_INSN)
	{
	  if (GET_CODE (PATTERN (insn)) != USE
	      && volatile_refs_p (PATTERN (insn)))
	    SET_INSN_VOLATILE (insn);

	  /* A SET that makes space on the stack cannot be dead.
	     (Such SETs occur only for allocating variable-size data,
	     so they will always have a PLUS or MINUS according to the
	     direction of stack growth.)
	     Even if this function never uses this stack pointer value,
	     signal handlers do!  */
	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
#ifdef STACK_GROWS_DOWNWARD
		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
#else
		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
#endif
		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
	    SET_INSN_VOLATILE (insn);

	  /* Delete (in effect) any obvious no-op moves.  */
	  else if (noop_move_p (insn))
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }
	}

      /* Check if insn modifies the stack pointer.  */
      if ( current_function_sp_is_unchanging
	  && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), notice_stack_pointer_modification);
    }
}

/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.  SET is the regset to fill in.  */

static void
mark_regs_live_at_end (set)
     regset set;
{
  int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if (! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_REGNO_REG_SET (set, i);

  /* ??? Mark function return value here rather than as uses.  */
}

/* Determine which registers are live at the start of each
   basic block of the function whose first insn is F.
   NREGS is the number of registers used in F.
   We allocate the vector basic_block_live_at_start
   and the regsets that it points to, and fill them with the data.
   regset_size and regset_bytes are also set here.

   REMOVE_DEAD_CODE is nonzero if dead stores may be deleted.  */

static void
life_analysis_1 (f, nregs, remove_dead_code)
     rtx f;
     int nregs;
     int remove_dead_code;
{
  int first_pass;
  int changed;
  register int i;
  char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
  regset *new_live_at_end;

  struct obstack flow_obstack;

  gcc_obstack_init (&flow_obstack);

  max_regno = nregs;

  /* Allocate and zero out many data structures
     that will record the data from lifetime analysis.  */

  allocate_reg_life_data ();
  allocate_bb_life_data ();

  reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
  memset (reg_next_use, 0, nregs * sizeof (rtx));

  /* Set up regset-vectors used internally within this function.
     Their meanings are documented above, with their declarations.
     The extra element holds the ENTRY block's data.  */

  new_live_at_end = (regset *) alloca ((n_basic_blocks + 1) * sizeof (regset));
  init_regset_vector (new_live_at_end, n_basic_blocks + 1, &flow_obstack);

  /* Stick these vectors into the AUX field of the basic block, so that
     we don't have to keep going through the index.  */

  for (i = 0; i < n_basic_blocks; ++i)
    BASIC_BLOCK (i)->aux = new_live_at_end[i];
  /* Here I equals n_basic_blocks, so ENTRY gets the extra element.  */
  ENTRY_BLOCK_PTR->aux = new_live_at_end[i];

  /* Assume that the stack pointer is unchanging if alloca hasn't been used.
     This will be cleared by record_volatile_insns if it encounters an insn
     which modifies the stack pointer.  */
  current_function_sp_is_unchanging = !current_function_calls_alloca;

  record_volatile_insns (f);

  if (n_basic_blocks > 0)
    {
      regset theend;
      register edge e;

      theend = EXIT_BLOCK_PTR->global_live_at_start;
      mark_regs_live_at_end (theend);

      /* Propagate this exit data to each of EXIT's predecessors.  */
      for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
	{
	  COPY_REG_SET (e->src->global_live_at_end, theend);
	  COPY_REG_SET ((regset) e->src->aux, theend);
	}
    }

  /* The post-reload life analysis has (on a global basis) the same registers
     live as was computed by reload itself.

     Otherwise elimination offsets and such may be incorrect.

     Reload will mark some registers as live even though they do not appear
     in the rtl.  */
  if (reload_completed)
    memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
  memset (regs_ever_live, 0, sizeof regs_ever_live);

  /* Propagate life info through the basic blocks
     around the graph of basic blocks.

     This is a relaxation process: each time a new register
     is live at the end of the basic block, we must scan the block
     to determine which registers are, as a consequence, live at the beginning
     of that block.  These registers must then be marked live at the ends
     of all the blocks that can transfer control to that block.
     The process continues until it reaches a fixed point.  */

  first_pass = 1;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (i = n_basic_blocks - 1; i >= 0; i--)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  int consider = first_pass;
	  int must_rescan = first_pass;
	  register int j;

	  if (!first_pass)
	    {
	      /* Set CONSIDER if this block needs thinking about at all
		 (that is, if the regs live now at the end of it
		 are not the same as were live at the end of it when
		 we last thought about it).
		 Set must_rescan if it needs to be thought about
		 instruction by instruction (that is, if any additional
		 reg that is live at the end now but was not live there before
		 is one of the significant regs of this basic block).  */

	      EXECUTE_IF_AND_COMPL_IN_REG_SET
		((regset) bb->aux, bb->global_live_at_end, 0, j,
		 {
		   consider = 1;
		   if (REGNO_REG_SET_P (bb->local_set, j))
		     {
		       must_rescan = 1;
		       goto done;
		     }
		 });
	    done:
	      if (! consider)
		continue;
	    }

	  /* The live_at_start of this block may be changing,
	     so another pass will be required after this one.  */
	  changed = 1;

	  if (! must_rescan)
	    {
	      /* No complete rescan needed;
		 just record those variables newly known live at end
		 as live at start as well.  */
	      IOR_AND_COMPL_REG_SET (bb->global_live_at_start,
				     (regset) bb->aux,
				     bb->global_live_at_end);

	      IOR_AND_COMPL_REG_SET (bb->global_live_at_end,
				     (regset) bb->aux,
				     bb->global_live_at_end);
	    }
	  else
	    {
	      /* Update the basic_block_live_at_start
		 by propagation backwards through the block.  */
	      COPY_REG_SET (bb->global_live_at_end, (regset) bb->aux);
	      COPY_REG_SET (bb->global_live_at_start,
			    bb->global_live_at_end);
	      propagate_block (bb->global_live_at_start,
			       bb->head, bb->end, 0,
			       first_pass ? bb->local_set : (regset) 0,
			       i, remove_dead_code);
	    }

	  /* Update the new_live_at_end's of the block's predecessors.  */
	  {
	    register edge e;

	    for (e = bb->pred; e ; e = e->pred_next)
	      IOR_REG_SET ((regset) e->src->aux, bb->global_live_at_start);
	  }

#ifdef USE_C_ALLOCA
	  alloca (0);
#endif
	}
      first_pass = 0;
    }

  /* The only pseudos that are live at the beginning of the function are
     those that were not set anywhere in the function.  local-alloc doesn't
     know how to handle these correctly, so mark them as not local to any
     one basic block.  */

  if (n_basic_blocks > 0)
    EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start,
			       FIRST_PSEUDO_REGISTER, i,
			       {
				 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
			       });

  /* Now the life information is accurate.  Make one more pass over each
     basic block to delete dead stores, create autoincrement addressing
     and record how many times each register is used, is set, or dies.  */

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* We start with global_live_at_end to determine which stores are
	 dead.  This process is destructive, and we wish to preserve the
	 contents of global_live_at_end for posterity.  Fortunately,
	 new_live_at_end, due to the way we converged on a solution,
	 contains a duplicate of global_live_at_end that we can kill.  */
      propagate_block ((regset) bb->aux, bb->head, bb->end, 1, (regset) 0, i, remove_dead_code);

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  /* We have a problem with any pseudoreg that lives across the setjmp.
     ANSI says that if a user variable does not change in value between
     the setjmp and the longjmp, then the longjmp preserves it.  This
     includes longjmp from a place where the pseudo appears dead.
     (In principle, the value still exists if it is in scope.)
     If the pseudo goes in a hard reg, some other value may occupy
     that hard reg where this pseudo is dead, thus clobbering the pseudo.
     Conclusion: such a pseudo must not go in a hard reg.  */
  EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
			     FIRST_PSEUDO_REGISTER, i,
			     {
			       if (regno_reg_rtx[i] != 0)
				 {
				   REG_LIVE_LENGTH (i) = -1;
				   REG_BASIC_BLOCK (i) = -1;
				 }
			     });

  /* Restore regs_ever_live that was provided by reload.  */
  if (reload_completed)
    memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));

  /* The ENTRY element of new_live_at_end is reclaimed with the obstack.  */
  free_regset_vector (new_live_at_end, n_basic_blocks);
  obstack_free (&flow_obstack, NULL_PTR);

  for (i = 0; i < n_basic_blocks; ++i)
    BASIC_BLOCK (i)->aux = NULL;
  ENTRY_BLOCK_PTR->aux = NULL;
}

/* Subroutines of life analysis.  */

/* Allocate the permanent data structures that represent the results
   of life analysis.  Not static since used also for stupid life analysis.  */

void
allocate_bb_life_data ()
{
  register int i;

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);

      bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
    }

  ENTRY_BLOCK_PTR->global_live_at_end
    = OBSTACK_ALLOC_REG_SET (function_obstack);
  EXIT_BLOCK_PTR->global_live_at_start
    = OBSTACK_ALLOC_REG_SET (function_obstack);

  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
}

/* Allocate (or reallocate) per-register life-analysis data and clear
   the set counts that this pass will recompute.  */

void
allocate_reg_life_data ()
{
  int i;

  /* Recalculate the register space, in case it has grown.  Old style
     vector oriented regsets would set regset_{size,bytes} here also.  */
  allocate_reg_info (max_regno, FALSE, FALSE);

  /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
     information, explicitly reset it here.  The allocation should have
     already happened on the previous reg_scan pass.  Make sure in case
     some more registers were allocated.  */
  for (i = 0; i < max_regno; i++)
    REG_N_SETS (i) = 0;
}

/* Make each element of VECTOR point at a regset.  The vector has
   NELTS elements, and space is allocated from the ALLOC_OBSTACK
   obstack.
   */

static void
init_regset_vector (vector, nelts, alloc_obstack)
     regset *vector;
     int nelts;
     struct obstack *alloc_obstack;
{
  register int i;

  for (i = 0; i < nelts; i++)
    {
      vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
      CLEAR_REG_SET (vector[i]);
    }
}

/* Release any additional space allocated for each element of VECTOR point
   other than the regset header itself.  The vector has NELTS elements.  */

void
free_regset_vector (vector, nelts)
     regset *vector;
     int nelts;
{
  register int i;

  for (i = 0; i < nelts; i++)
    FREE_REG_SET (vector[i]);
}

/* Compute the registers live at the beginning of a basic block
   from those live at the end.

   When called, OLD contains those live at the end.
   On return, it contains those live at the beginning.
   FIRST and LAST are the first and last insns of the basic block.

   FINAL is nonzero if we are doing the final pass which is not
   for computing the life info (since that has already been done)
   but for acting on it.  On this pass, we delete dead stores,
   set up the logical links and dead-variables lists of instructions,
   and merge instructions for autoincrement and autodecrement addresses.

   SIGNIFICANT is nonzero only the first time for each basic block.
   If it is nonzero, it points to a regset in which we store
   a 1 for each register that is set within the block.

   BNUM is the number of the basic block.

   REMOVE_DEAD_CODE is nonzero if dead-store removal is allowed.  */

static void
propagate_block (old, first, last, final, significant, bnum, remove_dead_code)
     register regset old;
     rtx first;
     rtx last;
     int final;
     regset significant;
     int bnum;
     int remove_dead_code;
{
  register rtx insn;
  rtx prev;
  regset live;
  regset dead;

  /* Find the loop depth for this block.  Ignore loop level changes in the
     middle of the basic block -- for register allocation purposes, the
     important uses will be in the blocks wholly contained within the loop
     not in the loop pre-header or post-trailer.  */
  loop_depth = BASIC_BLOCK (bnum)->loop_depth;

  dead = ALLOCA_REG_SET ();
  live = ALLOCA_REG_SET ();

  cc0_live = 0;
  mem_set_list = NULL_RTX;

  if (final)
    {
      register int i;

      /* Process the regs live at the end of the block.
	 Mark them as not local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
				 {
				   REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
				 });
    }

  /* Scan the block an insn at a time from end to beginning.  */

  for (insn = last; ; insn = prev)
    {
      prev = PREV_INSN (insn);

      if (GET_CODE (insn) == NOTE)
	{
	  /* If this is a call to `setjmp' et al,
	     warn if any non-volatile datum is live.  */

	  if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	    IOR_REG_SET (regs_live_at_setjmp, old);
	}

      /* Update the life-status of regs for this insn.
	 First DEAD gets which regs are set in this insn
	 then LIVE gets which regs are used in this insn.
	 Then the regs live before the insn
	 are those live after, with DEAD regs turned off,
	 and then LIVE regs turned on.  */

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  register int i;
	  rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
	  int insn_is_dead = 0;
	  int libcall_is_dead = 0;

	  if (remove_dead_code)
	    {
	      insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
			      /* Don't delete something that refers to volatile storage!  */
			      && ! INSN_VOLATILE (insn));
	      libcall_is_dead = (insn_is_dead && note != 0
				 && libcall_dead_p (PATTERN (insn), old, note, insn));
	    }

	  /* If an instruction consists of just dead store(s) on final pass,
	     "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
	     We could really delete it with delete_insn, but that
	     can cause trouble for first or last insn in a basic block.  */
	  if (final && insn_is_dead)
	    {
	      rtx inote;
	      /* If the insn referred to a label, note that the label is
		 now less used.  */
	      for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
		{
		  if (REG_NOTE_KIND (inote) == REG_LABEL)
		    {
		      rtx label = XEXP (inote, 0);
		      rtx next;
		      LABEL_NUSES (label)--;

		      /* If this label was attached to an ADDR_VEC, it's
			 safe to delete the ADDR_VEC.  In fact, it's pretty much
			 mandatory to delete it, because the ADDR_VEC may
			 be referencing labels that no longer exist.  */
		      if (LABEL_NUSES (label) == 0
			  && (next = next_nonnote_insn (label)) != NULL
			  && GET_CODE (next) == JUMP_INSN
			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
			{
			  rtx pat = PATTERN (next);
			  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
			  int len = XVECLEN (pat, diff_vec_p);
			  int i;
			  for (i = 0; i < len; i++)
			    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;

			  flow_delete_insn (next);
			}
		    }
		}

	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;

	      /* CC0 is now known to be dead.  Either this insn used it,
		 in which case it doesn't anymore, or clobbered it,
		 so the next insn can't use it.  */
	      cc0_live = 0;

	      /* If this insn is copying the return value from a library call,
		 delete the entire library call.  */
	      if (libcall_is_dead)
		{
		  rtx first = XEXP (note, 0);
		  rtx p = insn;
		  while (INSN_DELETED_P (first))
		    first = NEXT_INSN (first);
		  while (p != first)
		    {
		      p = PREV_INSN (p);
		      PUT_CODE (p, NOTE);
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		      NOTE_SOURCE_FILE (p) = 0;
		    }
		}
	      goto flushed;
	    }

	  CLEAR_REG_SET (dead);
	  CLEAR_REG_SET (live);

	  /* See if this is an increment or decrement that can be
	     merged into a following memory address.  */
#ifdef AUTO_INC_DEC
	  {
	    register rtx x = single_set (insn);

	    /* Does this instruction increment or decrement a register?  */
	    if (!reload_completed
		&& final && x != 0
		&& GET_CODE (SET_DEST (x)) == REG
		&& (GET_CODE (SET_SRC (x)) == PLUS
		    || GET_CODE (SET_SRC (x)) == MINUS)
		&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
		&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
		/* Ok, look for a following memory ref we can combine with.
		   If one is found, change the memory ref to a PRE_INC
		   or PRE_DEC, cancel this insn, and return 1.
		   Return 0 if nothing has been done.  */
		&& try_pre_increment_1 (insn))
	      goto flushed;
	  }
#endif /* AUTO_INC_DEC */

	  /* If this is not the final pass, and this insn is copying the
	     value of a library call and it's dead, don't scan the
	     insns that perform the library call, so that the call's
	     arguments are not marked live.  */
	  if (libcall_is_dead)
	    {
	      /* Mark the dest reg as `significant'.  */
	      mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);

	      /* Skip backward over the whole libcall sequence.  */
	      insn = XEXP (note, 0);
	      prev = PREV_INSN (insn);
	    }
	  else if (GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
		   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
	    /* We have an insn to pop a constant amount off the stack.
	       (Such insns use PLUS regardless of the direction of the stack,
	       and any insn to adjust the stack by a constant is always a pop.)
	       These insns, if not dead stores, have no effect on life.  */
	    ;
	  else
	    {
	      /* Any regs live at the time of a call instruction
		 must not go in a register clobbered by calls.
		 Find all regs now live and record this for them.  */

	      if (GET_CODE (insn) == CALL_INSN && final)
		EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
					   {
					     REG_N_CALLS_CROSSED (i)++;
					   });

	      /* LIVE gets the regs used in INSN;
		 DEAD gets those set by it.  Dead insns don't make anything
		 live.  */

	      mark_set_regs (old, dead, PATTERN (insn),
			     final ? insn : NULL_RTX, significant);

	      /* If an insn doesn't use CC0, it becomes dead since we
		 assume that every insn clobbers it.  So show it dead here;
		 mark_used_regs will set it live if it is referenced.  */
	      cc0_live = 0;

	      if (! insn_is_dead)
		mark_used_regs (old, live, PATTERN (insn), final, insn);

	      /* Sometimes we may have inserted something before INSN (such as
		 a move) when we make an auto-inc.  So ensure we will scan
		 those insns.  */
#ifdef AUTO_INC_DEC
	      prev = PREV_INSN (insn);
#endif

	      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
		{
		  register int i;

		  rtx note;

		  for (note = CALL_INSN_FUNCTION_USAGE (insn);
		       note;
		       note = XEXP (note, 1))
		    if (GET_CODE (XEXP (note, 0)) == USE)
		      mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
				      final, insn);

		  /* Each call clobbers all call-clobbered regs that are not
		     global or fixed.  Note that the function-value reg is a
		     call-clobbered reg, and mark_set_regs has already had
		     a chance to handle it.  */

		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! global_regs[i]
			&& ! fixed_regs[i])
		      SET_REGNO_REG_SET (dead, i);

		  /* The stack ptr is used (honorarily) by a CALL insn.  */
		  SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);

		  /* Calls may also reference any of the global registers,
		     so they are made live.  */
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (global_regs[i])
		      mark_used_regs (old, live,
				      gen_rtx_REG (reg_raw_mode[i], i),
				      final, insn);

		  /* Calls also clobber memory.  */
		  mem_set_list = NULL_RTX;
		}

	      /* Update OLD for the registers used or set.  */
	      AND_COMPL_REG_SET (old, dead);
	      IOR_REG_SET (old, live);

	    }

	  /* On final pass, update counts of how many insns each reg is live
	     at.  */
	  if (final)
	    EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
				       { REG_LIVE_LENGTH (i)++; });
	}
    flushed: ;
      if (insn == first)
	break;
    }

  FREE_REG_SET (dead);
  FREE_REG_SET (live);
}

/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   NEEDED is the regset that says which regs are alive after the insn.

   Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2947 2948 If X is the entire body of an insn, NOTES contains the reg notes 2949 pertaining to the insn. */ 2950 2951static int 2952insn_dead_p (x, needed, call_ok, notes) 2953 rtx x; 2954 regset needed; 2955 int call_ok; 2956 rtx notes ATTRIBUTE_UNUSED; 2957{ 2958 enum rtx_code code = GET_CODE (x); 2959 2960#ifdef AUTO_INC_DEC 2961 /* If flow is invoked after reload, we must take existing AUTO_INC 2962 expresions into account. */ 2963 if (reload_completed) 2964 { 2965 for ( ; notes; notes = XEXP (notes, 1)) 2966 { 2967 if (REG_NOTE_KIND (notes) == REG_INC) 2968 { 2969 int regno = REGNO (XEXP (notes, 0)); 2970 2971 /* Don't delete insns to set global regs. */ 2972 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno]) 2973 || REGNO_REG_SET_P (needed, regno)) 2974 return 0; 2975 } 2976 } 2977 } 2978#endif 2979 2980 /* If setting something that's a reg or part of one, 2981 see if that register's altered value will be live. */ 2982 2983 if (code == SET) 2984 { 2985 rtx r = SET_DEST (x); 2986 2987 /* A SET that is a subroutine call cannot be dead. */ 2988 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL) 2989 return 0; 2990 2991#ifdef HAVE_cc0 2992 if (GET_CODE (r) == CC0) 2993 return ! cc0_live; 2994#endif 2995 2996 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r)) 2997 { 2998 rtx temp; 2999 /* Walk the set of memory locations we are currently tracking 3000 and see if one is an identical match to this memory location. 3001 If so, this memory write is dead (remember, we're walking 3002 backwards from the end of the block to the start. */ 3003 temp = mem_set_list; 3004 while (temp) 3005 { 3006 if (rtx_equal_p (XEXP (temp, 0), r)) 3007 return 1; 3008 temp = XEXP (temp, 1); 3009 } 3010 } 3011 3012 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART 3013 || GET_CODE (r) == ZERO_EXTRACT) 3014 r = SUBREG_REG (r); 3015 3016 if (GET_CODE (r) == REG) 3017 { 3018 int regno = REGNO (r); 3019 3020 /* Don't delete insns to set global regs. 
*/ 3021 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno]) 3022 /* Make sure insns to set frame pointer aren't deleted. */ 3023 || (regno == FRAME_POINTER_REGNUM 3024 && (! reload_completed || frame_pointer_needed)) 3025#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 3026 || (regno == HARD_FRAME_POINTER_REGNUM 3027 && (! reload_completed || frame_pointer_needed)) 3028#endif 3029#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 3030 /* Make sure insns to set arg pointer are never deleted 3031 (if the arg pointer isn't fixed, there will be a USE for 3032 it, so we can treat it normally). */ 3033 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) 3034#endif 3035 || REGNO_REG_SET_P (needed, regno)) 3036 return 0; 3037 3038 /* If this is a hard register, verify that subsequent words are 3039 not needed. */ 3040 if (regno < FIRST_PSEUDO_REGISTER) 3041 { 3042 int n = HARD_REGNO_NREGS (regno, GET_MODE (r)); 3043 3044 while (--n > 0) 3045 if (REGNO_REG_SET_P (needed, regno+n)) 3046 return 0; 3047 } 3048 3049 return 1; 3050 } 3051 } 3052 3053 /* If performing several activities, 3054 insn is dead if each activity is individually dead. 3055 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE 3056 that's inside a PARALLEL doesn't make the insn worth keeping. */ 3057 else if (code == PARALLEL) 3058 { 3059 int i = XVECLEN (x, 0); 3060 3061 for (i--; i >= 0; i--) 3062 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER 3063 && GET_CODE (XVECEXP (x, 0, i)) != USE 3064 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX)) 3065 return 0; 3066 3067 return 1; 3068 } 3069 3070 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That 3071 is not necessarily true for hard registers. */ 3072 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG 3073 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER 3074 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0)))) 3075 return 1; 3076 3077 /* We do not check other CLOBBER or USE here. 
An insn consisting of just 3078 a CLOBBER or just a USE should not be deleted. */ 3079 return 0; 3080} 3081 3082/* If X is the pattern of the last insn in a libcall, and assuming X is dead, 3083 return 1 if the entire library call is dead. 3084 This is true if X copies a register (hard or pseudo) 3085 and if the hard return reg of the call insn is dead. 3086 (The caller should have tested the destination of X already for death.) 3087 3088 If this insn doesn't just copy a register, then we don't 3089 have an ordinary libcall. In that case, cse could not have 3090 managed to substitute the source for the dest later on, 3091 so we can assume the libcall is dead. 3092 3093 NEEDED is the bit vector of pseudoregs live before this insn. 3094 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */ 3095 3096static int 3097libcall_dead_p (x, needed, note, insn) 3098 rtx x; 3099 regset needed; 3100 rtx note; 3101 rtx insn; 3102{ 3103 register RTX_CODE code = GET_CODE (x); 3104 3105 if (code == SET) 3106 { 3107 register rtx r = SET_SRC (x); 3108 if (GET_CODE (r) == REG) 3109 { 3110 rtx call = XEXP (note, 0); 3111 rtx call_pat; 3112 register int i; 3113 3114 /* Find the call insn. */ 3115 while (call != insn && GET_CODE (call) != CALL_INSN) 3116 call = NEXT_INSN (call); 3117 3118 /* If there is none, do nothing special, 3119 since ordinary death handling can understand these insns. */ 3120 if (call == insn) 3121 return 0; 3122 3123 /* See if the hard reg holding the value is dead. 3124 If this is a PARALLEL, find the call within it. */ 3125 call_pat = PATTERN (call); 3126 if (GET_CODE (call_pat) == PARALLEL) 3127 { 3128 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--) 3129 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET 3130 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL) 3131 break; 3132 3133 /* This may be a library call that is returning a value 3134 via invisible pointer. 
Do nothing special, since 3135 ordinary death handling can understand these insns. */ 3136 if (i < 0) 3137 return 0; 3138 3139 call_pat = XVECEXP (call_pat, 0, i); 3140 } 3141 3142 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call)); 3143 } 3144 } 3145 return 1; 3146} 3147 3148/* Return 1 if register REGNO was used before it was set, i.e. if it is 3149 live at function entry. Don't count global register variables, variables 3150 in registers that can be used for function arg passing, or variables in 3151 fixed hard registers. */ 3152 3153int 3154regno_uninitialized (regno) 3155 int regno; 3156{ 3157 if (n_basic_blocks == 0 3158 || (regno < FIRST_PSEUDO_REGISTER 3159 && (global_regs[regno] 3160 || fixed_regs[regno] 3161 || FUNCTION_ARG_REGNO_P (regno)))) 3162 return 0; 3163 3164 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno); 3165} 3166 3167/* 1 if register REGNO was alive at a place where `setjmp' was called 3168 and was set more than once or is an argument. 3169 Such regs may be clobbered by `longjmp'. */ 3170 3171int 3172regno_clobbered_at_setjmp (regno) 3173 int regno; 3174{ 3175 if (n_basic_blocks == 0) 3176 return 0; 3177 3178 return ((REG_N_SETS (regno) > 1 3179 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno)) 3180 && REGNO_REG_SET_P (regs_live_at_setjmp, regno)); 3181} 3182 3183/* INSN references memory, possibly using autoincrement addressing modes. 3184 Find any entries on the mem_set_list that need to be invalidated due 3185 to an address change. */ 3186static void 3187invalidate_mems_from_autoinc (insn) 3188 rtx insn; 3189{ 3190 rtx note = REG_NOTES (insn); 3191 for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) 3192 { 3193 if (REG_NOTE_KIND (note) == REG_INC) 3194 { 3195 rtx temp = mem_set_list; 3196 rtx prev = NULL_RTX; 3197 3198 while (temp) 3199 { 3200 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0))) 3201 { 3202 /* Splice temp out of list. 
*/ 3203 if (prev) 3204 XEXP (prev, 1) = XEXP (temp, 1); 3205 else 3206 mem_set_list = XEXP (temp, 1); 3207 } 3208 else 3209 prev = temp; 3210 temp = XEXP (temp, 1); 3211 } 3212 } 3213 } 3214} 3215 3216/* Process the registers that are set within X. 3217 Their bits are set to 1 in the regset DEAD, 3218 because they are dead prior to this insn. 3219 3220 If INSN is nonzero, it is the insn being processed 3221 and the fact that it is nonzero implies this is the FINAL pass 3222 in propagate_block. In this case, various info about register 3223 usage is stored, LOG_LINKS fields of insns are set up. */ 3224 3225static void 3226mark_set_regs (needed, dead, x, insn, significant) 3227 regset needed; 3228 regset dead; 3229 rtx x; 3230 rtx insn; 3231 regset significant; 3232{ 3233 register RTX_CODE code = GET_CODE (x); 3234 3235 if (code == SET || code == CLOBBER) 3236 mark_set_1 (needed, dead, x, insn, significant); 3237 else if (code == PARALLEL) 3238 { 3239 register int i; 3240 for (i = XVECLEN (x, 0) - 1; i >= 0; i--) 3241 { 3242 code = GET_CODE (XVECEXP (x, 0, i)); 3243 if (code == SET || code == CLOBBER) 3244 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant); 3245 } 3246 } 3247} 3248 3249/* Process a single SET rtx, X. */ 3250 3251static void 3252mark_set_1 (needed, dead, x, insn, significant) 3253 regset needed; 3254 regset dead; 3255 rtx x; 3256 rtx insn; 3257 regset significant; 3258{ 3259 register int regno; 3260 register rtx reg = SET_DEST (x); 3261 3262 /* Some targets place small structures in registers for 3263 return values of functions. We have to detect this 3264 case specially here to get correct flow information. 
*/ 3265 if (GET_CODE (reg) == PARALLEL 3266 && GET_MODE (reg) == BLKmode) 3267 { 3268 register int i; 3269 3270 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--) 3271 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant); 3272 return; 3273 } 3274 3275 /* Modifying just one hardware register of a multi-reg value 3276 or just a byte field of a register 3277 does not mean the value from before this insn is now dead. 3278 But it does mean liveness of that register at the end of the block 3279 is significant. 3280 3281 Within mark_set_1, however, we treat it as if the register is 3282 indeed modified. mark_used_regs will, however, also treat this 3283 register as being used. Thus, we treat these insns as setting a 3284 new value for the register as a function of its old value. This 3285 cases LOG_LINKS to be made appropriately and this will help combine. */ 3286 3287 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT 3288 || GET_CODE (reg) == SIGN_EXTRACT 3289 || GET_CODE (reg) == STRICT_LOW_PART) 3290 reg = XEXP (reg, 0); 3291 3292 /* If this set is a MEM, then it kills any aliased writes. 3293 If this set is a REG, then it kills any MEMs which use the reg. */ 3294 if (GET_CODE (reg) == MEM 3295 || GET_CODE (reg) == REG) 3296 { 3297 rtx temp = mem_set_list; 3298 rtx prev = NULL_RTX; 3299 3300 while (temp) 3301 { 3302 if ((GET_CODE (reg) == MEM 3303 && output_dependence (XEXP (temp, 0), reg)) 3304 || (GET_CODE (reg) == REG 3305 && reg_overlap_mentioned_p (reg, XEXP (temp, 0)))) 3306 { 3307 /* Splice this entry out of the list. */ 3308 if (prev) 3309 XEXP (prev, 1) = XEXP (temp, 1); 3310 else 3311 mem_set_list = XEXP (temp, 1); 3312 } 3313 else 3314 prev = temp; 3315 temp = XEXP (temp, 1); 3316 } 3317 } 3318 3319 /* If the memory reference had embedded side effects (autoincrement 3320 address modes. Then we may need to kill some entries on the 3321 memory set list. 
*/ 3322 if (insn && GET_CODE (reg) == MEM) 3323 invalidate_mems_from_autoinc (insn); 3324 3325 if (GET_CODE (reg) == MEM && ! side_effects_p (reg) 3326 /* We do not know the size of a BLKmode store, so we do not track 3327 them for redundant store elimination. */ 3328 && GET_MODE (reg) != BLKmode 3329 /* There are no REG_INC notes for SP, so we can't assume we'll see 3330 everything that invalidates it. To be safe, don't eliminate any 3331 stores though SP; none of them should be redundant anyway. */ 3332 && ! reg_mentioned_p (stack_pointer_rtx, reg)) 3333 mem_set_list = gen_rtx_EXPR_LIST (VOIDmode, reg, mem_set_list); 3334 3335 if (GET_CODE (reg) == REG 3336 && (regno = REGNO (reg), ! (regno == FRAME_POINTER_REGNUM 3337 && (! reload_completed || frame_pointer_needed))) 3338#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 3339 && ! (regno == HARD_FRAME_POINTER_REGNUM 3340 && (! reload_completed || frame_pointer_needed)) 3341#endif 3342#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 3343 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) 3344#endif 3345 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])) 3346 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */ 3347 { 3348 int some_needed = REGNO_REG_SET_P (needed, regno); 3349 int some_not_needed = ! some_needed; 3350 3351 /* Mark it as a significant register for this basic block. */ 3352 if (significant) 3353 SET_REGNO_REG_SET (significant, regno); 3354 3355 /* Mark it as dead before this insn. */ 3356 SET_REGNO_REG_SET (dead, regno); 3357 3358 /* A hard reg in a wide mode may really be multiple registers. 3359 If so, mark all of them just like the first. */ 3360 if (regno < FIRST_PSEUDO_REGISTER) 3361 { 3362 int n; 3363 3364 /* Nothing below is needed for the stack pointer; get out asap. 3365 Eg, log links aren't needed, since combine won't use them. 
*/ 3366 if (regno == STACK_POINTER_REGNUM) 3367 return; 3368 3369 n = HARD_REGNO_NREGS (regno, GET_MODE (reg)); 3370 while (--n > 0) 3371 { 3372 int regno_n = regno + n; 3373 int needed_regno = REGNO_REG_SET_P (needed, regno_n); 3374 if (significant) 3375 SET_REGNO_REG_SET (significant, regno_n); 3376 3377 SET_REGNO_REG_SET (dead, regno_n); 3378 some_needed |= needed_regno; 3379 some_not_needed |= ! needed_regno; 3380 } 3381 } 3382 /* Additional data to record if this is the final pass. */ 3383 if (insn) 3384 { 3385 register rtx y = reg_next_use[regno]; 3386 register int blocknum = BLOCK_NUM (insn); 3387 3388 /* If this is a hard reg, record this function uses the reg. */ 3389 3390 if (regno < FIRST_PSEUDO_REGISTER) 3391 { 3392 register int i; 3393 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)); 3394 3395 for (i = regno; i < endregno; i++) 3396 { 3397 /* The next use is no longer "next", since a store 3398 intervenes. */ 3399 reg_next_use[i] = 0; 3400 3401 regs_ever_live[i] = 1; 3402 REG_N_SETS (i)++; 3403 } 3404 } 3405 else 3406 { 3407 /* The next use is no longer "next", since a store 3408 intervenes. */ 3409 reg_next_use[regno] = 0; 3410 3411 /* Keep track of which basic blocks each reg appears in. */ 3412 3413 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN) 3414 REG_BASIC_BLOCK (regno) = blocknum; 3415 else if (REG_BASIC_BLOCK (regno) != blocknum) 3416 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL; 3417 3418 /* Count (weighted) references, stores, etc. This counts a 3419 register twice if it is modified, but that is correct. */ 3420 REG_N_SETS (regno)++; 3421 3422 REG_N_REFS (regno) += loop_depth; 3423 3424 /* The insns where a reg is live are normally counted 3425 elsewhere, but we want the count to include the insn 3426 where the reg is set, and the normal counting mechanism 3427 would not count it. */ 3428 REG_LIVE_LENGTH (regno)++; 3429 } 3430 3431 if (! 
some_not_needed) 3432 { 3433 /* Make a logical link from the next following insn 3434 that uses this register, back to this insn. 3435 The following insns have already been processed. 3436 3437 We don't build a LOG_LINK for hard registers containing 3438 in ASM_OPERANDs. If these registers get replaced, 3439 we might wind up changing the semantics of the insn, 3440 even if reload can make what appear to be valid assignments 3441 later. */ 3442 if (y && (BLOCK_NUM (y) == blocknum) 3443 && (regno >= FIRST_PSEUDO_REGISTER 3444 || asm_noperands (PATTERN (y)) < 0)) 3445 LOG_LINKS (y) 3446 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y)); 3447 } 3448 else if (! some_needed) 3449 { 3450 /* Note that dead stores have already been deleted when possible 3451 If we get here, we have found a dead store that cannot 3452 be eliminated (because the same insn does something useful). 3453 Indicate this by marking the reg being set as dying here. */ 3454 REG_NOTES (insn) 3455 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn)); 3456 REG_N_DEATHS (REGNO (reg))++; 3457 } 3458 else 3459 { 3460 /* This is a case where we have a multi-word hard register 3461 and some, but not all, of the words of the register are 3462 needed in subsequent insns. Write REG_UNUSED notes 3463 for those parts that were not needed. This case should 3464 be rare. */ 3465 3466 int i; 3467 3468 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; 3469 i >= 0; i--) 3470 if (!REGNO_REG_SET_P (needed, regno + i)) 3471 REG_NOTES (insn) 3472 = gen_rtx_EXPR_LIST (REG_UNUSED, 3473 gen_rtx_REG (reg_raw_mode[regno + i], 3474 regno + i), 3475 REG_NOTES (insn)); 3476 } 3477 } 3478 } 3479 else if (GET_CODE (reg) == REG) 3480 reg_next_use[regno] = 0; 3481 3482 /* If this is the last pass and this is a SCRATCH, show it will be dying 3483 here and count it. 
*/ 3484 else if (GET_CODE (reg) == SCRATCH && insn != 0) 3485 { 3486 REG_NOTES (insn) 3487 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn)); 3488 } 3489} 3490 3491#ifdef AUTO_INC_DEC 3492 3493/* X is a MEM found in INSN. See if we can convert it into an auto-increment 3494 reference. */ 3495 3496static void 3497find_auto_inc (needed, x, insn) 3498 regset needed; 3499 rtx x; 3500 rtx insn; 3501{ 3502 rtx addr = XEXP (x, 0); 3503 HOST_WIDE_INT offset = 0; 3504 rtx set; 3505 3506 /* Here we detect use of an index register which might be good for 3507 postincrement, postdecrement, preincrement, or predecrement. */ 3508 3509 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT) 3510 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0); 3511 3512 if (GET_CODE (addr) == REG) 3513 { 3514 register rtx y; 3515 register int size = GET_MODE_SIZE (GET_MODE (x)); 3516 rtx use; 3517 rtx incr; 3518 int regno = REGNO (addr); 3519 3520 /* Is the next use an increment that might make auto-increment? */ 3521 if ((incr = reg_next_use[regno]) != 0 3522 && (set = single_set (incr)) != 0 3523 && GET_CODE (set) == SET 3524 && BLOCK_NUM (incr) == BLOCK_NUM (insn) 3525 /* Can't add side effects to jumps; if reg is spilled and 3526 reloaded, there's no way to store back the altered value. */ 3527 && GET_CODE (insn) != JUMP_INSN 3528 && (y = SET_SRC (set), GET_CODE (y) == PLUS) 3529 && XEXP (y, 0) == addr 3530 && GET_CODE (XEXP (y, 1)) == CONST_INT 3531 && ((HAVE_POST_INCREMENT 3532 && (INTVAL (XEXP (y, 1)) == size && offset == 0)) 3533 || (HAVE_POST_DECREMENT 3534 && (INTVAL (XEXP (y, 1)) == - size && offset == 0)) 3535 || (HAVE_PRE_INCREMENT 3536 && (INTVAL (XEXP (y, 1)) == size && offset == size)) 3537 || (HAVE_PRE_DECREMENT 3538 && (INTVAL (XEXP (y, 1)) == - size && offset == - size))) 3539 /* Make sure this reg appears only once in this insn. 
*/ 3540 && (use = find_use_as_address (PATTERN (insn), addr, offset), 3541 use != 0 && use != (rtx) 1)) 3542 { 3543 rtx q = SET_DEST (set); 3544 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size 3545 ? (offset ? PRE_INC : POST_INC) 3546 : (offset ? PRE_DEC : POST_DEC)); 3547 3548 if (dead_or_set_p (incr, addr)) 3549 { 3550 /* This is the simple case. Try to make the auto-inc. If 3551 we can't, we are done. Otherwise, we will do any 3552 needed updates below. */ 3553 if (! validate_change (insn, &XEXP (x, 0), 3554 gen_rtx_fmt_e (inc_code, Pmode, addr), 3555 0)) 3556 return; 3557 } 3558 else if (GET_CODE (q) == REG 3559 /* PREV_INSN used here to check the semi-open interval 3560 [insn,incr). */ 3561 && ! reg_used_between_p (q, PREV_INSN (insn), incr) 3562 /* We must also check for sets of q as q may be 3563 a call clobbered hard register and there may 3564 be a call between PREV_INSN (insn) and incr. */ 3565 && ! reg_set_between_p (q, PREV_INSN (insn), incr)) 3566 { 3567 /* We have *p followed sometime later by q = p+size. 3568 Both p and q must be live afterward, 3569 and q is not used between INSN and its assignment. 3570 Change it to q = p, ...*q..., q = q+size. 3571 Then fall into the usual case. */ 3572 rtx insns, temp; 3573 basic_block bb; 3574 3575 start_sequence (); 3576 emit_move_insn (q, addr); 3577 insns = get_insns (); 3578 end_sequence (); 3579 3580 bb = BLOCK_FOR_INSN (insn); 3581 for (temp = insns; temp; temp = NEXT_INSN (temp)) 3582 set_block_for_insn (temp, bb); 3583 3584 /* If we can't make the auto-inc, or can't make the 3585 replacement into Y, exit. There's no point in making 3586 the change below if we can't do the auto-inc and doing 3587 so is not correct in the pre-inc case. */ 3588 3589 validate_change (insn, &XEXP (x, 0), 3590 gen_rtx_fmt_e (inc_code, Pmode, q), 3591 1); 3592 validate_change (incr, &XEXP (y, 0), q, 1); 3593 if (! 
apply_change_group ()) 3594 return; 3595 3596 /* We now know we'll be doing this change, so emit the 3597 new insn(s) and do the updates. */ 3598 emit_insns_before (insns, insn); 3599 3600 if (BLOCK_FOR_INSN (insn)->head == insn) 3601 BLOCK_FOR_INSN (insn)->head = insns; 3602 3603 /* INCR will become a NOTE and INSN won't contain a 3604 use of ADDR. If a use of ADDR was just placed in 3605 the insn before INSN, make that the next use. 3606 Otherwise, invalidate it. */ 3607 if (GET_CODE (PREV_INSN (insn)) == INSN 3608 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET 3609 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr) 3610 reg_next_use[regno] = PREV_INSN (insn); 3611 else 3612 reg_next_use[regno] = 0; 3613 3614 addr = q; 3615 regno = REGNO (q); 3616 3617 /* REGNO is now used in INCR which is below INSN, but 3618 it previously wasn't live here. If we don't mark 3619 it as needed, we'll put a REG_DEAD note for it 3620 on this insn, which is incorrect. */ 3621 SET_REGNO_REG_SET (needed, regno); 3622 3623 /* If there are any calls between INSN and INCR, show 3624 that REGNO now crosses them. */ 3625 for (temp = insn; temp != incr; temp = NEXT_INSN (temp)) 3626 if (GET_CODE (temp) == CALL_INSN) 3627 REG_N_CALLS_CROSSED (regno)++; 3628 } 3629 else 3630 return; 3631 3632 /* If we haven't returned, it means we were able to make the 3633 auto-inc, so update the status. First, record that this insn 3634 has an implicit side effect. */ 3635 3636 REG_NOTES (insn) 3637 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn)); 3638 3639 /* Modify the old increment-insn to simply copy 3640 the already-incremented value of our register. */ 3641 if (! validate_change (incr, &SET_SRC (set), addr, 0)) 3642 abort (); 3643 3644 /* If that makes it a no-op (copying the register into itself) delete 3645 it so it won't appear to be a "use" and a "set" of this 3646 register. 
*/ 3647 if (SET_DEST (set) == addr) 3648 { 3649 PUT_CODE (incr, NOTE); 3650 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED; 3651 NOTE_SOURCE_FILE (incr) = 0; 3652 } 3653 3654 if (regno >= FIRST_PSEUDO_REGISTER) 3655 { 3656 /* Count an extra reference to the reg. When a reg is 3657 incremented, spilling it is worse, so we want to make 3658 that less likely. */ 3659 REG_N_REFS (regno) += loop_depth; 3660 3661 /* Count the increment as a setting of the register, 3662 even though it isn't a SET in rtl. */ 3663 REG_N_SETS (regno)++; 3664 } 3665 } 3666 } 3667} 3668#endif /* AUTO_INC_DEC */ 3669 3670/* Scan expression X and store a 1-bit in LIVE for each reg it uses. 3671 This is done assuming the registers needed from X 3672 are those that have 1-bits in NEEDED. 3673 3674 On the final pass, FINAL is 1. This means try for autoincrement 3675 and count the uses and deaths of each pseudo-reg. 3676 3677 INSN is the containing instruction. If INSN is dead, this function is not 3678 called. */ 3679 3680static void 3681mark_used_regs (needed, live, x, final, insn) 3682 regset needed; 3683 regset live; 3684 rtx x; 3685 int final; 3686 rtx insn; 3687{ 3688 register RTX_CODE code; 3689 register int regno; 3690 int i; 3691 3692 retry: 3693 code = GET_CODE (x); 3694 switch (code) 3695 { 3696 case LABEL_REF: 3697 case SYMBOL_REF: 3698 case CONST_INT: 3699 case CONST: 3700 case CONST_DOUBLE: 3701 case PC: 3702 case ADDR_VEC: 3703 case ADDR_DIFF_VEC: 3704 return; 3705 3706#ifdef HAVE_cc0 3707 case CC0: 3708 cc0_live = 1; 3709 return; 3710#endif 3711 3712 case CLOBBER: 3713 /* If we are clobbering a MEM, mark any registers inside the address 3714 as being used. */ 3715 if (GET_CODE (XEXP (x, 0)) == MEM) 3716 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn); 3717 return; 3718 3719 case MEM: 3720 /* Invalidate the data for the last MEM stored, but only if MEM is 3721 something that can be stored into. 
*/ 3722 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF 3723 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))) 3724 ; /* needn't clear the memory set list */ 3725 else 3726 { 3727 rtx temp = mem_set_list; 3728 rtx prev = NULL_RTX; 3729 3730 while (temp) 3731 { 3732 if (anti_dependence (XEXP (temp, 0), x)) 3733 { 3734 /* Splice temp out of the list. */ 3735 if (prev) 3736 XEXP (prev, 1) = XEXP (temp, 1); 3737 else 3738 mem_set_list = XEXP (temp, 1); 3739 } 3740 else 3741 prev = temp; 3742 temp = XEXP (temp, 1); 3743 } 3744 } 3745 3746 /* If the memory reference had embedded side effects (autoincrement 3747 address modes. Then we may need to kill some entries on the 3748 memory set list. */ 3749 if (insn) 3750 invalidate_mems_from_autoinc (insn); 3751 3752#ifdef AUTO_INC_DEC 3753 if (final) 3754 find_auto_inc (needed, x, insn); 3755#endif 3756 break; 3757 3758 case SUBREG: 3759 if (GET_CODE (SUBREG_REG (x)) == REG 3760 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER 3761 && (GET_MODE_SIZE (GET_MODE (x)) 3762 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))) 3763 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1; 3764 3765 /* While we're here, optimize this case. */ 3766 x = SUBREG_REG (x); 3767 3768 /* In case the SUBREG is not of a register, don't optimize */ 3769 if (GET_CODE (x) != REG) 3770 { 3771 mark_used_regs (needed, live, x, final, insn); 3772 return; 3773 } 3774 3775 /* ... fall through ... */ 3776 3777 case REG: 3778 /* See a register other than being set 3779 => mark it as needed. */ 3780 3781 regno = REGNO (x); 3782 { 3783 int some_needed = REGNO_REG_SET_P (needed, regno); 3784 int some_not_needed = ! some_needed; 3785 3786 SET_REGNO_REG_SET (live, regno); 3787 3788 /* A hard reg in a wide mode may really be multiple registers. 3789 If so, mark all of them just like the first. */ 3790 if (regno < FIRST_PSEUDO_REGISTER) 3791 { 3792 int n; 3793 3794 /* For stack ptr or fixed arg pointer, 3795 nothing below can be necessary, so waste no more time. 
*/ 3796 if (regno == STACK_POINTER_REGNUM 3797#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 3798 || (regno == HARD_FRAME_POINTER_REGNUM 3799 && (! reload_completed || frame_pointer_needed)) 3800#endif 3801#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 3802 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) 3803#endif 3804 || (regno == FRAME_POINTER_REGNUM 3805 && (! reload_completed || frame_pointer_needed))) 3806 { 3807 /* If this is a register we are going to try to eliminate, 3808 don't mark it live here. If we are successful in 3809 eliminating it, it need not be live unless it is used for 3810 pseudos, in which case it will have been set live when 3811 it was allocated to the pseudos. If the register will not 3812 be eliminated, reload will set it live at that point. */ 3813 3814 if (! TEST_HARD_REG_BIT (elim_reg_set, regno)) 3815 regs_ever_live[regno] = 1; 3816 return; 3817 } 3818 /* No death notes for global register variables; 3819 their values are live after this function exits. */ 3820 if (global_regs[regno]) 3821 { 3822 if (final) 3823 reg_next_use[regno] = insn; 3824 return; 3825 } 3826 3827 n = HARD_REGNO_NREGS (regno, GET_MODE (x)); 3828 while (--n > 0) 3829 { 3830 int regno_n = regno + n; 3831 int needed_regno = REGNO_REG_SET_P (needed, regno_n); 3832 3833 SET_REGNO_REG_SET (live, regno_n); 3834 some_needed |= needed_regno; 3835 some_not_needed |= ! needed_regno; 3836 } 3837 } 3838 if (final) 3839 { 3840 /* Record where each reg is used, so when the reg 3841 is set we know the next insn that uses it. */ 3842 3843 reg_next_use[regno] = insn; 3844 3845 if (regno < FIRST_PSEUDO_REGISTER) 3846 { 3847 /* If a hard reg is being used, 3848 record that this function does use it. */ 3849 3850 i = HARD_REGNO_NREGS (regno, GET_MODE (x)); 3851 if (i == 0) 3852 i = 1; 3853 do 3854 regs_ever_live[regno + --i] = 1; 3855 while (i > 0); 3856 } 3857 else 3858 { 3859 /* Keep track of which basic block each reg appears in. 
*/ 3860 3861 register int blocknum = BLOCK_NUM (insn); 3862 3863 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN) 3864 REG_BASIC_BLOCK (regno) = blocknum; 3865 else if (REG_BASIC_BLOCK (regno) != blocknum) 3866 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL; 3867 3868 /* Count (weighted) number of uses of each reg. */ 3869 3870 REG_N_REFS (regno) += loop_depth; 3871 } 3872 3873 /* Record and count the insns in which a reg dies. 3874 If it is used in this insn and was dead below the insn 3875 then it dies in this insn. If it was set in this insn, 3876 we do not make a REG_DEAD note; likewise if we already 3877 made such a note. */ 3878 3879 if (some_not_needed 3880 && ! dead_or_set_p (insn, x) 3881#if 0 3882 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno]) 3883#endif 3884 ) 3885 { 3886 /* Check for the case where the register dying partially 3887 overlaps the register set by this insn. */ 3888 if (regno < FIRST_PSEUDO_REGISTER 3889 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1) 3890 { 3891 int n = HARD_REGNO_NREGS (regno, GET_MODE (x)); 3892 while (--n >= 0) 3893 some_needed |= dead_or_set_regno_p (insn, regno + n); 3894 } 3895 3896 /* If none of the words in X is needed, make a REG_DEAD 3897 note. Otherwise, we must make partial REG_DEAD notes. */ 3898 if (! some_needed) 3899 { 3900 REG_NOTES (insn) 3901 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn)); 3902 REG_N_DEATHS (regno)++; 3903 } 3904 else 3905 { 3906 int i; 3907 3908 /* Don't make a REG_DEAD note for a part of a register 3909 that is set in the insn. */ 3910 3911 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1; 3912 i >= 0; i--) 3913 if (!REGNO_REG_SET_P (needed, regno + i) 3914 && ! 
dead_or_set_regno_p (insn, regno + i)) 3915 REG_NOTES (insn) 3916 = gen_rtx_EXPR_LIST (REG_DEAD, 3917 gen_rtx_REG (reg_raw_mode[regno + i], 3918 regno + i), 3919 REG_NOTES (insn)); 3920 } 3921 } 3922 } 3923 } 3924 return; 3925 3926 case SET: 3927 { 3928 register rtx testreg = SET_DEST (x); 3929 int mark_dest = 0; 3930 3931 /* If storing into MEM, don't show it as being used. But do 3932 show the address as being used. */ 3933 if (GET_CODE (testreg) == MEM) 3934 { 3935#ifdef AUTO_INC_DEC 3936 if (final) 3937 find_auto_inc (needed, testreg, insn); 3938#endif 3939 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn); 3940 mark_used_regs (needed, live, SET_SRC (x), final, insn); 3941 return; 3942 } 3943 3944 /* Storing in STRICT_LOW_PART is like storing in a reg 3945 in that this SET might be dead, so ignore it in TESTREG. 3946 but in some other ways it is like using the reg. 3947 3948 Storing in a SUBREG or a bit field is like storing the entire 3949 register in that if the register's value is not used 3950 then this SET is not needed. */ 3951 while (GET_CODE (testreg) == STRICT_LOW_PART 3952 || GET_CODE (testreg) == ZERO_EXTRACT 3953 || GET_CODE (testreg) == SIGN_EXTRACT 3954 || GET_CODE (testreg) == SUBREG) 3955 { 3956 if (GET_CODE (testreg) == SUBREG 3957 && GET_CODE (SUBREG_REG (testreg)) == REG 3958 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER 3959 && (GET_MODE_SIZE (GET_MODE (testreg)) 3960 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg))))) 3961 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1; 3962 3963 /* Modifying a single register in an alternate mode 3964 does not use any of the old value. But these other 3965 ways of storing in a register do use the old value. 
*/ 3966 if (GET_CODE (testreg) == SUBREG 3967 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg))) 3968 ; 3969 else 3970 mark_dest = 1; 3971 3972 testreg = XEXP (testreg, 0); 3973 } 3974 3975 /* If this is a store into a register, 3976 recursively scan the value being stored. */ 3977 3978 if ((GET_CODE (testreg) == PARALLEL 3979 && GET_MODE (testreg) == BLKmode) 3980 || (GET_CODE (testreg) == REG 3981 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM 3982 && (! reload_completed || frame_pointer_needed))) 3983#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 3984 && ! (regno == HARD_FRAME_POINTER_REGNUM 3985 && (! reload_completed || frame_pointer_needed)) 3986#endif 3987#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 3988 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) 3989#endif 3990 )) 3991 /* We used to exclude global_regs here, but that seems wrong. 3992 Storing in them is like storing in mem. */ 3993 { 3994 mark_used_regs (needed, live, SET_SRC (x), final, insn); 3995 if (mark_dest) 3996 mark_used_regs (needed, live, SET_DEST (x), final, insn); 3997 return; 3998 } 3999 } 4000 break; 4001 4002 case RETURN: 4003 /* If exiting needs the right stack value, consider this insn as 4004 using the stack pointer. In any event, consider it as using 4005 all global registers and all registers used by return. */ 4006 if (! EXIT_IGNORE_STACK 4007 || (! FRAME_POINTER_REQUIRED 4008 && ! 
current_function_calls_alloca 4009 && flag_omit_frame_pointer) 4010 || current_function_sp_is_unchanging) 4011 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM); 4012 4013 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 4014 if (global_regs[i] 4015#ifdef EPILOGUE_USES 4016 || EPILOGUE_USES (i) 4017#endif 4018 ) 4019 SET_REGNO_REG_SET (live, i); 4020 break; 4021 4022 case ASM_OPERANDS: 4023 case UNSPEC_VOLATILE: 4024 case TRAP_IF: 4025 case ASM_INPUT: 4026 { 4027 /* Traditional and volatile asm instructions must be considered to use 4028 and clobber all hard registers, all pseudo-registers and all of 4029 memory. So must TRAP_IF and UNSPEC_VOLATILE operations. 4030 4031 Consider for instance a volatile asm that changes the fpu rounding 4032 mode. An insn should not be moved across this even if it only uses 4033 pseudo-regs because it might give an incorrectly rounded result. 4034 4035 ?!? Unfortunately, marking all hard registers as live causes massive 4036 problems for the register allocator and marking all pseudos as live 4037 creates mountains of uninitialized variable warnings. 4038 4039 So for now, just clear the memory set list and mark any regs 4040 we can find in ASM_OPERANDS as used. */ 4041 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x)) 4042 mem_set_list = NULL_RTX; 4043 4044 /* For all ASM_OPERANDS, we must traverse the vector of input operands. 4045 We can not just fall through here since then we would be confused 4046 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate 4047 traditional asms unlike their normal usage. */ 4048 if (code == ASM_OPERANDS) 4049 { 4050 int j; 4051 4052 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++) 4053 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j), 4054 final, insn); 4055 } 4056 break; 4057 } 4058 4059 4060 default: 4061 break; 4062 } 4063 4064 /* Recursively scan the operands of this expression. 
*/ 4065 4066 { 4067 register char *fmt = GET_RTX_FORMAT (code); 4068 register int i; 4069 4070 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 4071 { 4072 if (fmt[i] == 'e') 4073 { 4074 /* Tail recursive case: save a function call level. */ 4075 if (i == 0) 4076 { 4077 x = XEXP (x, 0); 4078 goto retry; 4079 } 4080 mark_used_regs (needed, live, XEXP (x, i), final, insn); 4081 } 4082 else if (fmt[i] == 'E') 4083 { 4084 register int j; 4085 for (j = 0; j < XVECLEN (x, i); j++) 4086 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn); 4087 } 4088 } 4089 } 4090} 4091 4092#ifdef AUTO_INC_DEC 4093 4094static int 4095try_pre_increment_1 (insn) 4096 rtx insn; 4097{ 4098 /* Find the next use of this reg. If in same basic block, 4099 make it do pre-increment or pre-decrement if appropriate. */ 4100 rtx x = single_set (insn); 4101 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1) 4102 * INTVAL (XEXP (SET_SRC (x), 1))); 4103 int regno = REGNO (SET_DEST (x)); 4104 rtx y = reg_next_use[regno]; 4105 if (y != 0 4106 && BLOCK_NUM (y) == BLOCK_NUM (insn) 4107 /* Don't do this if the reg dies, or gets set in y; a standard addressing 4108 mode would be better. */ 4109 && ! dead_or_set_p (y, SET_DEST (x)) 4110 && try_pre_increment (y, SET_DEST (x), amount)) 4111 { 4112 /* We have found a suitable auto-increment 4113 and already changed insn Y to do it. 4114 So flush this increment-instruction. */ 4115 PUT_CODE (insn, NOTE); 4116 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; 4117 NOTE_SOURCE_FILE (insn) = 0; 4118 /* Count a reference to this reg for the increment 4119 insn we are deleting. When a reg is incremented. 4120 spilling it is worse, so we want to make that 4121 less likely. 
*/ 4122 if (regno >= FIRST_PSEUDO_REGISTER) 4123 { 4124 REG_N_REFS (regno) += loop_depth; 4125 REG_N_SETS (regno)++; 4126 } 4127 return 1; 4128 } 4129 return 0; 4130} 4131 4132/* Try to change INSN so that it does pre-increment or pre-decrement 4133 addressing on register REG in order to add AMOUNT to REG. 4134 AMOUNT is negative for pre-decrement. 4135 Returns 1 if the change could be made. 4136 This checks all about the validity of the result of modifying INSN. */ 4137 4138static int 4139try_pre_increment (insn, reg, amount) 4140 rtx insn, reg; 4141 HOST_WIDE_INT amount; 4142{ 4143 register rtx use; 4144 4145 /* Nonzero if we can try to make a pre-increment or pre-decrement. 4146 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */ 4147 int pre_ok = 0; 4148 /* Nonzero if we can try to make a post-increment or post-decrement. 4149 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,... 4150 It is possible for both PRE_OK and POST_OK to be nonzero if the machine 4151 supports both pre-inc and post-inc, or both pre-dec and post-dec. */ 4152 int post_ok = 0; 4153 4154 /* Nonzero if the opportunity actually requires post-inc or post-dec. */ 4155 int do_post = 0; 4156 4157 /* From the sign of increment, see which possibilities are conceivable 4158 on this target machine. */ 4159 if (HAVE_PRE_INCREMENT && amount > 0) 4160 pre_ok = 1; 4161 if (HAVE_POST_INCREMENT && amount > 0) 4162 post_ok = 1; 4163 4164 if (HAVE_PRE_DECREMENT && amount < 0) 4165 pre_ok = 1; 4166 if (HAVE_POST_DECREMENT && amount < 0) 4167 post_ok = 1; 4168 4169 if (! (pre_ok || post_ok)) 4170 return 0; 4171 4172 /* It is not safe to add a side effect to a jump insn 4173 because if the incremented register is spilled and must be reloaded 4174 there would be no way to store the incremented value back in memory. 
*/ 4175 4176 if (GET_CODE (insn) == JUMP_INSN) 4177 return 0; 4178 4179 use = 0; 4180 if (pre_ok) 4181 use = find_use_as_address (PATTERN (insn), reg, 0); 4182 if (post_ok && (use == 0 || use == (rtx) 1)) 4183 { 4184 use = find_use_as_address (PATTERN (insn), reg, -amount); 4185 do_post = 1; 4186 } 4187 4188 if (use == 0 || use == (rtx) 1) 4189 return 0; 4190 4191 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount)) 4192 return 0; 4193 4194 /* See if this combination of instruction and addressing mode exists. */ 4195 if (! validate_change (insn, &XEXP (use, 0), 4196 gen_rtx_fmt_e (amount > 0 4197 ? (do_post ? POST_INC : PRE_INC) 4198 : (do_post ? POST_DEC : PRE_DEC), 4199 Pmode, reg), 0)) 4200 return 0; 4201 4202 /* Record that this insn now has an implicit side effect on X. */ 4203 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn)); 4204 return 1; 4205} 4206 4207#endif /* AUTO_INC_DEC */ 4208 4209/* Find the place in the rtx X where REG is used as a memory address. 4210 Return the MEM rtx that so uses it. 4211 If PLUSCONST is nonzero, search instead for a memory address equivalent to 4212 (plus REG (const_int PLUSCONST)). 4213 4214 If such an address does not appear, return 0. 4215 If REG appears more than once, or is used other than in such an address, 4216 return (rtx)1. 
 */

rtx
find_use_as_address (x, reg, plusconst)
     register rtx x;
     rtx reg;
     HOST_WIDE_INT plusconst;
{
  enum rtx_code code = GET_CODE (x);
  char *fmt = GET_RTX_FORMAT (code);
  register int i;
  /* VALUE accumulates the single acceptable MEM found so far; a second
     find anywhere in X makes the whole search fail with (rtx) 1.  */
  register rtx value = 0;
  register rtx tem;

  /* (mem (reg)) matches when no displacement is wanted.  */
  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
    return x;

  /* (mem (plus (reg) (const_int PLUSCONST))) matches otherwise.  */
  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == reg
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
    return x;

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
	 that is unacceptable.  */
      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
	return (rtx) (HOST_WIDE_INT) 1;
    }

  /* A bare use of REG (outside an address) disqualifies it.  */
  if (x == reg)
    return (rtx) (HOST_WIDE_INT) 1;

  /* Recurse over all operands, insisting on exactly one match total.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
	  if (value == 0)
	    value = tem;
	  else if (tem != 0)
	    return (rtx) (HOST_WIDE_INT) 1;
	}
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
	      if (value == 0)
		value = tem;
	      else if (tem != 0)
		return (rtx) (HOST_WIDE_INT) 1;
	    }
	}
    }

  return value;
}

/* Write information about registers and basic blocks into FILE.
   This is part of making a debugging dump.
 */

void
dump_flow_info (file)
     FILE *file;
{
  register int i;
  static char *reg_class_names[] = REG_CLASS_NAMES;

  /* First dump the per-pseudo statistics gathered by life analysis.  */
  fprintf (file, "%d registers.\n", max_regno);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (REG_N_REFS (i))
      {
	enum reg_class class, altclass;
	fprintf (file, "\nRegister %d used %d times across %d insns",
		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
	if (REG_BASIC_BLOCK (i) >= 0)
	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
	if (REG_N_SETS (i))
	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
		   (REG_N_SETS (i) == 1) ? "" : "s");
	if (REG_USERVAR_P (regno_reg_rtx[i]))
	  fprintf (file, "; user var");
	if (REG_N_DEATHS (i) != 1)
	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
	if (REG_N_CALLS_CROSSED (i) == 1)
	  fprintf (file, "; crosses 1 call");
	else if (REG_N_CALLS_CROSSED (i))
	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
	/* Report the preferred and alternate register classes unless
	   they are the uninteresting defaults.  */
	class = reg_preferred_class (i);
	altclass = reg_alternate_class (i);
	if (class != GENERAL_REGS || altclass != ALL_REGS)
	  {
	    if (altclass == ALL_REGS || class == ALL_REGS)
	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
	    else if (altclass == NO_REGS)
	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
	    else
	      fprintf (file, "; pref %s, else %s",
		       reg_class_names[(int) class],
		       reg_class_names[(int) altclass]);
	  }
	if (REGNO_POINTER_FLAG (i))
	  fprintf (file, "; pointer");
	fprintf (file, ".\n");
      }

  /* Then dump the CFG: edges plus live-register sets per block.  */
  fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
  for (i = 0; i < n_basic_blocks; i++)
    {
      register basic_block bb = BASIC_BLOCK (i);
      register int regno;
      register edge e;

      fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
	       i, INSN_UID (bb->head), INSN_UID (bb->end));

      fprintf (file, "Predecessors: ");
      for (e = bb->pred; e ; e = e->pred_next)
	dump_edge_info (file, e, 0);

      fprintf (file, "\nSuccessors: ");
      for (e = bb->succ; e ; e = e->succ_next)
	dump_edge_info (file, e, 1);

      fprintf (file, "\nRegisters live at start:");
      if (bb->global_live_at_start)
	{
	  for (regno = 0; regno < max_regno; regno++)
	    if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	      fprintf (file, " %d", regno);
	}
      else
	fprintf (file, " n/a");

      fprintf (file, "\nRegisters live at end:");
      if (bb->global_live_at_end)
	{
	  for (regno = 0; regno < max_regno; regno++)
	    if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
	      fprintf (file, " %d", regno);
	}
      else
	fprintf (file, " n/a");

      putc('\n', file);
    }

  putc('\n', file);
}

/* Print to FILE a description of edge E.  DO_SUCC nonzero means E is
   being listed as a successor edge (so print its destination side);
   zero means it is a predecessor edge (print its source side).  */

static void
dump_edge_info (file, e, do_succ)
     FILE *file;
     edge e;
     int do_succ;
{
  basic_block side = (do_succ ? e->dest : e->src);

  if (side == ENTRY_BLOCK_PTR)
    fputs (" ENTRY", file);
  else if (side == EXIT_BLOCK_PTR)
    fputs (" EXIT", file);
  else
    fprintf (file, " %d", side->index);

  if (e->flags)
    {
      /* Names for the low-order edge flag bits, in bit order.  Flags
	 beyond this table are printed as their bit index.  */
      static char * bitnames[] = {
	"fallthru", "crit", "ab", "abcall", "eh", "fake"
      };
      int comma = 0;
      int i, flags = e->flags;

      fputc (' ', file);
      fputc ('(', file);
      for (i = 0; flags; i++)
	if (flags & (1 << i))
	  {
	    flags &= ~(1 << i);

	    if (comma)
	      fputc (',', file);
	    if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
	      fputs (bitnames[i], file);
	    else
	      fprintf (file, "%d", i);
	    comma = 1;
	  }
      fputc (')', file);
    }
}


/* Like print_rtl, but also print out live information for the start of each
   basic block.
 */

void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  register rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      int i;
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-uid maps: the block an insn starts, the block it ends,
	 and whether the insn belongs to zero, one, or several blocks.  */
      basic_block *start = (basic_block *)
	alloca (max_uid * sizeof (basic_block));
      basic_block *end = (basic_block *)
	alloca (max_uid * sizeof (basic_block));
      enum bb_state *in_bb_p = (enum bb_state *)
	alloca (max_uid * sizeof (enum bb_state));

      memset (start, 0, max_uid * sizeof (basic_block));
      memset (end, 0, max_uid * sizeof (basic_block));
      memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));

      /* Pass 1: record block membership for every insn.  */
      for (i = n_basic_blocks - 1; i >= 0; i--)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  rtx x;

	  start[INSN_UID (bb->head)] = bb;
	  end[INSN_UID (bb->end)] = bb;
	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;
	      if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID(x)] = state;

	      if (x == bb->end)
		break;
	    }
	}

      /* Pass 2: print each insn, annotated with block boundaries and the
	 registers live at each block start.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  basic_block bb;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);

	      EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
					 {
					   fprintf (outf, " %d", i);
					   if (i < FIRST_PSEUDO_REGISTER)
					     fprintf (outf, " [%s]",
						      reg_names[i]);
					 });
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER
	      && ! obey_regdecls)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    fprintf (outf, ";; End of basic block %d\n", bb->index);

	  if (did_output)
	    putc ('\n', outf);
	}
    }
}


/* Integer list support.  */

/* Allocate a node from list *HEAD_PTR.  Nodes are carved out of
   malloc'd blocks of INT_LIST_NODES_IN_BLK nodes each; a new block is
   chained onto *HEAD_PTR when the current one is exhausted.  */

static int_list_ptr
alloc_int_list_node (head_ptr)
     int_list_block **head_ptr;
{
  struct int_list_block *first_blk = *head_ptr;

  if (first_blk == NULL || first_blk->nodes_left <= 0)
    {
      first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
      first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
      first_blk->next = *head_ptr;
      *head_ptr = first_blk;
    }

  first_blk->nodes_left--;
  return &first_blk->nodes[first_blk->nodes_left];
}

/* Pointer to head of predecessor/successor block list.  */
static int_list_block *pred_int_list_blocks;

/* Add a new node to integer list LIST with value VAL.
   LIST is a pointer to a list object to allow for different implementations.
   If *LIST is initially NULL, the list is empty.
   The caller must not care whether the element is added to the front or
   to the end of the list (to allow for different implementations).  */

static int_list_ptr
add_int_list_node (blk_list, list, val)
     int_list_block **blk_list;
     int_list **list;
     int val;
{
  int_list_ptr p = alloc_int_list_node (blk_list);

  p->val = val;
  p->next = *list;
  *list = p;
  return p;
}

/* Free the blocks of lists at BLK_LIST.
*/ 4547 4548void 4549free_int_list (blk_list) 4550 int_list_block **blk_list; 4551{ 4552 int_list_block *p, *next; 4553 4554 for (p = *blk_list; p != NULL; p = next) 4555 { 4556 next = p->next; 4557 free (p); 4558 } 4559 4560 /* Mark list as empty for the next function we compile. */ 4561 *blk_list = NULL; 4562} 4563 4564/* Predecessor/successor computation. */ 4565 4566/* Mark PRED_BB a precessor of SUCC_BB, 4567 and conversely SUCC_BB a successor of PRED_BB. */ 4568 4569static void 4570add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs) 4571 int pred_bb; 4572 int succ_bb; 4573 int_list_ptr *s_preds; 4574 int_list_ptr *s_succs; 4575 int *num_preds; 4576 int *num_succs; 4577{ 4578 if (succ_bb != EXIT_BLOCK) 4579 { 4580 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb); 4581 num_preds[succ_bb]++; 4582 } 4583 if (pred_bb != ENTRY_BLOCK) 4584 { 4585 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb); 4586 num_succs[pred_bb]++; 4587 } 4588} 4589 4590/* Convert edge lists into pred/succ lists for backward compatibility. 
*/ 4591 4592void 4593compute_preds_succs (s_preds, s_succs, num_preds, num_succs) 4594 int_list_ptr *s_preds; 4595 int_list_ptr *s_succs; 4596 int *num_preds; 4597 int *num_succs; 4598{ 4599 int i, n = n_basic_blocks; 4600 edge e; 4601 4602 memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr)); 4603 memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr)); 4604 memset (num_preds, 0, n_basic_blocks * sizeof (int)); 4605 memset (num_succs, 0, n_basic_blocks * sizeof (int)); 4606 4607 for (i = 0; i < n; ++i) 4608 { 4609 basic_block bb = BASIC_BLOCK (i); 4610 4611 for (e = bb->succ; e ; e = e->succ_next) 4612 add_pred_succ (i, e->dest->index, s_preds, s_succs, 4613 num_preds, num_succs); 4614 } 4615 4616 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next) 4617 add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs, 4618 num_preds, num_succs); 4619} 4620 4621void 4622dump_bb_data (file, preds, succs, live_info) 4623 FILE *file; 4624 int_list_ptr *preds; 4625 int_list_ptr *succs; 4626 int live_info; 4627{ 4628 int bb; 4629 int_list_ptr p; 4630 4631 fprintf (file, "BB data\n\n"); 4632 for (bb = 0; bb < n_basic_blocks; bb++) 4633 { 4634 fprintf (file, "BB %d, start %d, end %d\n", bb, 4635 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb))); 4636 fprintf (file, " preds:"); 4637 for (p = preds[bb]; p != NULL; p = p->next) 4638 { 4639 int pred_bb = INT_LIST_VAL (p); 4640 if (pred_bb == ENTRY_BLOCK) 4641 fprintf (file, " entry"); 4642 else 4643 fprintf (file, " %d", pred_bb); 4644 } 4645 fprintf (file, "\n"); 4646 fprintf (file, " succs:"); 4647 for (p = succs[bb]; p != NULL; p = p->next) 4648 { 4649 int succ_bb = INT_LIST_VAL (p); 4650 if (succ_bb == EXIT_BLOCK) 4651 fprintf (file, " exit"); 4652 else 4653 fprintf (file, " %d", succ_bb); 4654 } 4655 if (live_info) 4656 { 4657 int regno; 4658 fprintf (file, "\nRegisters live at start:"); 4659 for (regno = 0; regno < max_regno; regno++) 4660 if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, 
regno)) 4661 fprintf (file, " %d", regno); 4662 fprintf (file, "\n"); 4663 } 4664 fprintf (file, "\n"); 4665 } 4666 fprintf (file, "\n"); 4667} 4668 4669/* Free basic block data storage. */ 4670 4671void 4672free_bb_mem () 4673{ 4674 free_int_list (&pred_int_list_blocks); 4675} 4676 4677/* Compute dominator relationships. */ 4678void 4679compute_dominators (dominators, post_dominators, s_preds, s_succs) 4680 sbitmap *dominators; 4681 sbitmap *post_dominators; 4682 int_list_ptr *s_preds; 4683 int_list_ptr *s_succs; 4684{ 4685 int bb, changed, passes; 4686 sbitmap *temp_bitmap; 4687 4688 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks); 4689 sbitmap_vector_ones (dominators, n_basic_blocks); 4690 sbitmap_vector_ones (post_dominators, n_basic_blocks); 4691 sbitmap_vector_zero (temp_bitmap, n_basic_blocks); 4692 4693 sbitmap_zero (dominators[0]); 4694 SET_BIT (dominators[0], 0); 4695 4696 sbitmap_zero (post_dominators[n_basic_blocks - 1]); 4697 SET_BIT (post_dominators[n_basic_blocks - 1], 0); 4698 4699 passes = 0; 4700 changed = 1; 4701 while (changed) 4702 { 4703 changed = 0; 4704 for (bb = 1; bb < n_basic_blocks; bb++) 4705 { 4706 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators, 4707 bb, s_preds); 4708 SET_BIT (temp_bitmap[bb], bb); 4709 changed |= sbitmap_a_and_b (dominators[bb], 4710 dominators[bb], 4711 temp_bitmap[bb]); 4712 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators, 4713 bb, s_succs); 4714 SET_BIT (temp_bitmap[bb], bb); 4715 changed |= sbitmap_a_and_b (post_dominators[bb], 4716 post_dominators[bb], 4717 temp_bitmap[bb]); 4718 } 4719 passes++; 4720 } 4721 4722 free (temp_bitmap); 4723} 4724 4725/* Given DOMINATORS, compute the immediate dominators into IDOM. 
 */

void
compute_immediate_dominators (idom, dominators)
     int *idom;
     sbitmap *dominators;
{
  sbitmap *tmp;
  int b;

  tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);

  /* Begin with tmp(n) = dom(n) - { n }.  */
  for (b = n_basic_blocks; --b >= 0; )
    {
      sbitmap_copy (tmp[b], dominators[b]);
      RESET_BIT (tmp[b], b);
    }

  /* Subtract out all of our dominator's dominators.  */
  for (b = n_basic_blocks; --b >= 0; )
    {
      sbitmap tmp_b = tmp[b];
      int s;

      for (s = n_basic_blocks; --s >= 0; )
	if (TEST_BIT (tmp_b, s))
	  sbitmap_difference (tmp_b, tmp_b, tmp[s]);
    }

  /* Find the one bit set in the bitmap and put it in the output array.  */
  for (b = n_basic_blocks; --b >= 0; )
    {
      int t;
      EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
    }

  sbitmap_vector_free (tmp);
}

/* Count for a single SET rtx, X.  Helper for count_reg_sets; bumps the
   set and (loop-depth-weighted) reference counts of the pseudo that X
   stores into.  */

static void
count_reg_sets_1 (x)
     rtx x;
{
  register int regno;
  register rtx reg = SET_DEST (x);

  /* Find the register that's set/clobbered, stripping any wrappers
     that modify only part or an alternate view of it.  */
  while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	 || GET_CODE (reg) == SIGN_EXTRACT
	 || GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* A BLKmode PARALLEL destination sets each of its pieces.  */
  if (GET_CODE (reg) == PARALLEL
      && GET_MODE (reg) == BLKmode)
    {
      register int i;
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	count_reg_sets_1 (XVECEXP (reg, 0, i));
      return;
    }

  if (GET_CODE (reg) == REG)
    {
      regno = REGNO (reg);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Count (weighted) references, stores, etc.  This counts a
	     register twice if it is modified, but that is correct.  */
	  REG_N_SETS (regno)++;

	  REG_N_REFS (regno) += loop_depth;
	}
    }
}

/* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
   REG_N_REFS by the current loop depth for each SET or CLOBBER found.  */

static void
count_reg_sets (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    count_reg_sets_1 (x);
  else if (code == PARALLEL)
    {
      register int i;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  code = GET_CODE (XVECEXP (x, 0, i));
	  if (code == SET || code == CLOBBER)
	    count_reg_sets_1 (XVECEXP (x, 0, i));
	}
    }
}

/* Increment REG_N_REFS by the current loop depth for each register
   reference found in X.  The case analysis mirrors mark_used_regs in
   the main life-analysis code.  */

static void
count_reg_references (x)
     rtx x;
{
  register RTX_CODE code;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and labels contain no register references.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
      return;

#ifdef HAVE_cc0
    case CC0:
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_references (XEXP (XEXP (x, 0), 0));
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);

      /* In case the SUBREG is not of a register, don't optimize */
      if (GET_CODE (x) != REG)
	{
	  count_reg_references (x);
	  return;
	}

      /* ... fall through ...  */

    case REG:
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
	REG_N_REFS (REGNO (x)) += loop_depth;
      return;

    case SET:
      {
	register rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
	    count_reg_references (XEXP (testreg, 0));
	    count_reg_references (SET_SRC (x));
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || GET_CODE (testreg) == REG)
	  {
	    count_reg_references (SET_SRC (x));
	    if (mark_dest)
	      count_reg_references (SET_DEST (x));
	    return;
	  }
      }
      break;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    count_reg_references (XEXP (x, i));
	  }
	else if (fmt[i] == 'E')
	  {
	    register int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      count_reg_references (XVECEXP (x, i, j));
	  }
      }
  }
}

/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   F is the first insn to be scanned.
   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for references
   in a loop.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (f, loop_step)
     rtx f;
     int loop_step;
{
  rtx insn;
  int i, max_reg;

  /* Clear out the old data.  */
  max_reg = max_reg_num ();
  for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
    {
      REG_N_SETS (i) = 0;
      REG_N_REFS (i) = 0;
    }

  /* Scan each insn in the chain and count how many times each register is
     set/used.  */
  loop_depth = 1;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Keep track of loop depth.  */
      if (GET_CODE (insn) == NOTE)
	{
	  /* Look for loop boundaries.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	    loop_depth -= loop_step;
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	    loop_depth += loop_step;

	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
	     Abort now rather than setting register status incorrectly.  */
	  if (loop_depth == 0)
	    abort ();
	}
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx links;

	  /* This call will increment REG_N_SETS for each SET or CLOBBER
	     of a register in INSN.  It will also increment REG_N_REFS
	     by the loop depth for each set of a register in INSN.  */
	  count_reg_sets (PATTERN (insn));

	  /* count_reg_sets does not detect autoincrement address modes, so
	     detect them here by looking at the notes attached to INSN.  */
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if (REG_NOTE_KIND (links) == REG_INC)
		/* Count (weighted) references, stores, etc.  This counts a
		   register twice if it is modified, but that is correct.  */
		REG_N_SETS (REGNO (XEXP (links, 0)))++;
	    }

	  /* This call will increment REG_N_REFS by the current loop depth for
	     each reference to a register in INSN.  */
	  count_reg_references (PATTERN (insn));

	  /* count_reg_references will not include counts for arguments to
	     function calls, so detect them here by examining the
	     CALL_INSN_FUNCTION_USAGE data.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      rtx note;

	      for (note = CALL_INSN_FUNCTION_USAGE (insn);
		   note;
		   note = XEXP (note, 1))
		if (GET_CODE (XEXP (note, 0)) == USE)
		  count_reg_references (SET_DEST (XEXP (note, 0)));
	    }
	}
    }
}

/* Record INSN's block as BB.
 */

void
set_block_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  size_t uid = INSN_UID (insn);
  if (uid >= basic_block_for_insn->num_elements)
    {
      int new_size;

      /* Add one-eighth the size so we don't keep calling xrealloc.  */
      new_size = uid + (uid + 7) / 8;

      VARRAY_GROW (basic_block_for_insn, new_size);
    }
  VARRAY_BB (basic_block_for_insn, uid) = bb;
}

/* Record INSN's block number as BB.  */
/* ??? This has got to go.  */

void
set_block_num (insn, bb)
     rtx insn;
     int bb;
{
  set_block_for_insn (insn, BASIC_BLOCK (bb));
}

/* Verify the CFG consistency.  This function checks some CFG invariants
   and aborts when something is wrong.  Hope that this function will help
   to convert many optimization passes to preserve CFG consistency.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

void
verify_flow_info ()
{
  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  /* BB_INFO maps insn uid -> containing block; filled by the first pass
     and used by the final orphan-insn scan.  */
  basic_block *bb_info;
  rtx x;
  int i;

  bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
  memset (bb_info, 0, max_uid * sizeof (basic_block));

  /* First pass check head/end pointers and set bb_info array used by
     later passes.  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* Check the head pointer and make sure that it is pointing into
	 insn list.  */
      for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
	if (x == bb->head)
	  break;
      if (!x)
	{
	  fatal ("verify_flow_info: Head insn %d for block %d not found in the insn stream.\n",
		 INSN_UID (bb->head), bb->index);
	}

      /* Check the end pointer and make sure that it is pointing into
	 insn list.  Along the way, record block membership and detect
	 insns claimed by two blocks.  */
      for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	{
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      fatal ("verify_flow_info: Insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	    }
	  bb_info[INSN_UID (x)] = bb;

	  if (x == bb->end)
	    break;
	}
      if (!x)
	{
	  fatal ("verify_flow_info: End insn %d for block %d not found in the insn stream.\n",
		 INSN_UID (bb->end), bb->index);
	}
    }

  /* Now check the basic blocks (boundaries etc.)  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* Check correctness of edge lists: every successor edge must be
	 found on its destination's predecessor list, and vice versa.  */
      edge e;

      e = bb->succ;
      while (e)
	{
	  if (e->src != bb)
	    {
	      fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
		       bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fflush (stderr);
	      abort ();
	    }
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      edge e2 = e->dest->pred;
	      while (e2 && e2 != e)
		e2 = e2->pred_next;
	      if (!e2)
		{
		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
			 bb->index);
		}
	    }
	  e = e->succ_next;
	}

      e = bb->pred;
      while (e)
	{
	  if (e->dest != bb)
	    {
	      fprintf (stderr, "verify_flow_info: Basic block %d pred edge is corrupted\n",
		       bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fflush (stderr);
	      abort ();
	    }
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      edge e2 = e->src->succ;
	      while (e2 && e2 != e)
		e2 = e2->succ_next;
	      if (!e2)
		{
		  fatal ("verify_flow_info: Basic block %i edge lists are corrupted\n",
			 bb->index);
		}
	    }
	  e = e->pred_next;
	}

      /* OK pointers are correct.  Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
	{
	  if (bb->end == x)
	    {
	      fatal ("verify_flow_info: Basic block contains only CODE_LABEL and no NOTE_INSN_BASIC_BLOCK note\n");
	    }
	  x = NEXT_INSN (x);
	}
      if (GET_CODE (x) != NOTE
	  || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
	  || NOTE_BASIC_BLOCK (x) != bb)
	{
	  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
		 bb->index);
	}

      if (bb->end == x)
	{
	  /* Do checks for empty blocks here */
	}
      else
	{
	  /* Scan the block's body: no stray block notes, and no jump,
	     label, or barrier before the recorded end insn.  */
	  x = NEXT_INSN (x);
	  while (x)
	    {
	      if (GET_CODE (x) == NOTE
		  && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
		{
		  fatal ("verify_flow_info: NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d\n",
			 INSN_UID (x), bb->index);
		}

	      if (x == bb->end)
		break;

	      if (GET_CODE (x) == JUMP_INSN
		  || GET_CODE (x) == CODE_LABEL
		  || GET_CODE (x) == BARRIER)
		{
		  fatal_insn ("verify_flow_info: Incorrect insn in the middle of basic block %d\n",
			      x, bb->index);
		}

	      x = NEXT_INSN (x);
	    }
	}
    }

  /* Finally, verify that every insn outside all blocks is of a kind
     that may legitimately live there.  */
  x = rtx_first;
  while (x)
    {
      if (!bb_info[INSN_UID (x)])
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		{
		  x = NEXT_INSN (x);
		}

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("verify_flow_info: Insn outside basic block\n", x);
	    }
	}

      x = NEXT_INSN (x);
    }
}