/* rtlanal.c -- GCC revision 96263 (extraction artifact header).  */
1/* Analyze RTL for C-Compiler 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 3 1999, 2000, 2001, 2002 Free Software Foundation, Inc. 4 5This file is part of GCC. 6 7GCC is free software; you can redistribute it and/or modify it under 8the terms of the GNU General Public License as published by the Free 9Software Foundation; either version 2, or (at your option) any later 10version. 11 12GCC is distributed in the hope that it will be useful, but WITHOUT ANY 13WARRANTY; without even the implied warranty of MERCHANTABILITY or 14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 15for more details. 16 17You should have received a copy of the GNU General Public License 18along with GCC; see the file COPYING. If not, write to the Free 19Software Foundation, 59 Temple Place - Suite 330, Boston, MA 2002111-1307, USA. */ 21 22 23#include "config.h" 24#include "system.h" 25#include "toplev.h" 26#include "rtl.h" 27#include "hard-reg-set.h" 28#include "tm_p.h" 29 30/* Forward declarations */ 31static void set_of_1 PARAMS ((rtx, rtx, void *)); 32static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *)); 33static int computed_jump_p_1 PARAMS ((rtx)); 34static void parms_set PARAMS ((rtx, rtx, void *)); 35 36/* Bit flags that specify the machine subtype we are compiling for. 37 Bits are tested using macros TARGET_... defined in the tm.h file 38 and set by `-m...' switches. Must be defined in rtlanal.c. */ 39 40int target_flags; 41 42/* Return 1 if the value of X is unstable 43 (would be different at a different point in the program). 44 The frame pointer, arg pointer, etc. are considered stable 45 (within one function) and so is anything marked `unchanging'. */ 46 47int 48rtx_unstable_p (x) 49 rtx x; 50{ 51 RTX_CODE code = GET_CODE (x); 52 int i; 53 const char *fmt; 54 55 switch (code) 56 { 57 case MEM: 58 return ! 
RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0)); 59 60 case QUEUED: 61 return 1; 62 63 case ADDRESSOF: 64 case CONST: 65 case CONST_INT: 66 case CONST_DOUBLE: 67 case CONST_VECTOR: 68 case SYMBOL_REF: 69 case LABEL_REF: 70 return 0; 71 72 case REG: 73 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ 74 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx 75 /* The arg pointer varies if it is not a fixed register. */ 76 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]) 77 || RTX_UNCHANGING_P (x)) 78 return 0; 79#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED 80 /* ??? When call-clobbered, the value is stable modulo the restore 81 that must happen after a call. This currently screws up local-alloc 82 into believing that the restore is not needed. */ 83 if (x == pic_offset_table_rtx) 84 return 0; 85#endif 86 return 1; 87 88 case ASM_OPERANDS: 89 if (MEM_VOLATILE_P (x)) 90 return 1; 91 92 /* FALLTHROUGH */ 93 94 default: 95 break; 96 } 97 98 fmt = GET_RTX_FORMAT (code); 99 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 100 if (fmt[i] == 'e') 101 { 102 if (rtx_unstable_p (XEXP (x, i))) 103 return 1; 104 } 105 else if (fmt[i] == 'E') 106 { 107 int j; 108 for (j = 0; j < XVECLEN (x, i); j++) 109 if (rtx_unstable_p (XVECEXP (x, i, j))) 110 return 1; 111 } 112 113 return 0; 114} 115 116/* Return 1 if X has a value that can vary even between two 117 executions of the program. 0 means X can be compared reliably 118 against certain constants or near-constants. 119 FOR_ALIAS is nonzero if we are called from alias analysis; if it is 120 zero, we are slightly more conservative. 121 The frame pointer and the arg pointer are considered constant. */ 122 123int 124rtx_varies_p (x, for_alias) 125 rtx x; 126 int for_alias; 127{ 128 RTX_CODE code = GET_CODE (x); 129 int i; 130 const char *fmt; 131 132 switch (code) 133 { 134 case MEM: 135 return ! 
RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias); 136 137 case QUEUED: 138 return 1; 139 140 case CONST: 141 case CONST_INT: 142 case CONST_DOUBLE: 143 case CONST_VECTOR: 144 case SYMBOL_REF: 145 case LABEL_REF: 146 return 0; 147 148 case REG: 149 /* Note that we have to test for the actual rtx used for the frame 150 and arg pointers and not just the register number in case we have 151 eliminated the frame and/or arg pointer and are using it 152 for pseudos. */ 153 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx 154 /* The arg pointer varies if it is not a fixed register. */ 155 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) 156 return 0; 157 if (x == pic_offset_table_rtx 158#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED 159 /* ??? When call-clobbered, the value is stable modulo the restore 160 that must happen after a call. This currently screws up 161 local-alloc into believing that the restore is not needed, so we 162 must return 0 only if we are called from alias analysis. */ 163 && for_alias 164#endif 165 ) 166 return 0; 167 return 1; 168 169 case LO_SUM: 170 /* The operand 0 of a LO_SUM is considered constant 171 (in fact it is related specifically to operand 1) 172 during alias analysis. */ 173 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias)) 174 || rtx_varies_p (XEXP (x, 1), for_alias); 175 176 case ASM_OPERANDS: 177 if (MEM_VOLATILE_P (x)) 178 return 1; 179 180 /* FALLTHROUGH */ 181 182 default: 183 break; 184 } 185 186 fmt = GET_RTX_FORMAT (code); 187 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 188 if (fmt[i] == 'e') 189 { 190 if (rtx_varies_p (XEXP (x, i), for_alias)) 191 return 1; 192 } 193 else if (fmt[i] == 'E') 194 { 195 int j; 196 for (j = 0; j < XVECLEN (x, i); j++) 197 if (rtx_varies_p (XVECEXP (x, i, j), for_alias)) 198 return 1; 199 } 200 201 return 0; 202} 203 204/* Return 0 if the use of X as an address in a MEM can cause a trap. 
*/ 205 206int 207rtx_addr_can_trap_p (x) 208 rtx x; 209{ 210 enum rtx_code code = GET_CODE (x); 211 212 switch (code) 213 { 214 case SYMBOL_REF: 215 return SYMBOL_REF_WEAK (x); 216 217 case LABEL_REF: 218 return 0; 219 220 case REG: 221 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ 222 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx 223 || x == stack_pointer_rtx 224 /* The arg pointer varies if it is not a fixed register. */ 225 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) 226 return 0; 227 /* All of the virtual frame registers are stack references. */ 228 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER 229 && REGNO (x) <= LAST_VIRTUAL_REGISTER) 230 return 0; 231 return 1; 232 233 case CONST: 234 return rtx_addr_can_trap_p (XEXP (x, 0)); 235 236 case PLUS: 237 /* An address is assumed not to trap if it is an address that can't 238 trap plus a constant integer or it is the pic register plus a 239 constant. */ 240 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0)) 241 && GET_CODE (XEXP (x, 1)) == CONST_INT) 242 || (XEXP (x, 0) == pic_offset_table_rtx 243 && CONSTANT_P (XEXP (x, 1)))); 244 245 case LO_SUM: 246 case PRE_MODIFY: 247 return rtx_addr_can_trap_p (XEXP (x, 1)); 248 249 case PRE_DEC: 250 case PRE_INC: 251 case POST_DEC: 252 case POST_INC: 253 case POST_MODIFY: 254 return rtx_addr_can_trap_p (XEXP (x, 0)); 255 256 default: 257 break; 258 } 259 260 /* If it isn't one of the case above, it can cause a trap. */ 261 return 1; 262} 263 264/* Return 1 if X refers to a memory location whose address 265 cannot be compared reliably with constant addresses, 266 or if X refers to a BLKmode memory object. 267 FOR_ALIAS is nonzero if we are called from alias analysis; if it is 268 zero, we are slightly more conservative. 
*/ 269 270int 271rtx_addr_varies_p (x, for_alias) 272 rtx x; 273 int for_alias; 274{ 275 enum rtx_code code; 276 int i; 277 const char *fmt; 278 279 if (x == 0) 280 return 0; 281 282 code = GET_CODE (x); 283 if (code == MEM) 284 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias); 285 286 fmt = GET_RTX_FORMAT (code); 287 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 288 if (fmt[i] == 'e') 289 { 290 if (rtx_addr_varies_p (XEXP (x, i), for_alias)) 291 return 1; 292 } 293 else if (fmt[i] == 'E') 294 { 295 int j; 296 for (j = 0; j < XVECLEN (x, i); j++) 297 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias)) 298 return 1; 299 } 300 return 0; 301} 302 303/* Return the value of the integer term in X, if one is apparent; 304 otherwise return 0. 305 Only obvious integer terms are detected. 306 This is used in cse.c with the `related_value' field. */ 307 308HOST_WIDE_INT 309get_integer_term (x) 310 rtx x; 311{ 312 if (GET_CODE (x) == CONST) 313 x = XEXP (x, 0); 314 315 if (GET_CODE (x) == MINUS 316 && GET_CODE (XEXP (x, 1)) == CONST_INT) 317 return - INTVAL (XEXP (x, 1)); 318 if (GET_CODE (x) == PLUS 319 && GET_CODE (XEXP (x, 1)) == CONST_INT) 320 return INTVAL (XEXP (x, 1)); 321 return 0; 322} 323 324/* If X is a constant, return the value sans apparent integer term; 325 otherwise return 0. 326 Only obvious integer terms are detected. */ 327 328rtx 329get_related_value (x) 330 rtx x; 331{ 332 if (GET_CODE (x) != CONST) 333 return 0; 334 x = XEXP (x, 0); 335 if (GET_CODE (x) == PLUS 336 && GET_CODE (XEXP (x, 1)) == CONST_INT) 337 return XEXP (x, 0); 338 else if (GET_CODE (x) == MINUS 339 && GET_CODE (XEXP (x, 1)) == CONST_INT) 340 return XEXP (x, 0); 341 return 0; 342} 343 344/* Given a tablejump insn INSN, return the RTL expression for the offset 345 into the jump table. If the offset cannot be determined, then return 346 NULL_RTX. 
347 348 If EARLIEST is non-zero, it is a pointer to a place where the earliest 349 insn used in locating the offset was found. */ 350 351rtx 352get_jump_table_offset (insn, earliest) 353 rtx insn; 354 rtx *earliest; 355{ 356 rtx label; 357 rtx table; 358 rtx set; 359 rtx old_insn; 360 rtx x; 361 rtx old_x; 362 rtx y; 363 rtx old_y; 364 int i; 365 366 if (GET_CODE (insn) != JUMP_INSN 367 || ! (label = JUMP_LABEL (insn)) 368 || ! (table = NEXT_INSN (label)) 369 || GET_CODE (table) != JUMP_INSN 370 || (GET_CODE (PATTERN (table)) != ADDR_VEC 371 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC) 372 || ! (set = single_set (insn))) 373 return NULL_RTX; 374 375 x = SET_SRC (set); 376 377 /* Some targets (eg, ARM) emit a tablejump that also 378 contains the out-of-range target. */ 379 if (GET_CODE (x) == IF_THEN_ELSE 380 && GET_CODE (XEXP (x, 2)) == LABEL_REF) 381 x = XEXP (x, 1); 382 383 /* Search backwards and locate the expression stored in X. */ 384 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x; 385 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) 386 ; 387 388 /* If X is an expression using a relative address then strip 389 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM, 390 or the jump table label. */ 391 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC 392 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)) 393 { 394 for (i = 0; i < 2; i++) 395 { 396 old_insn = insn; 397 y = XEXP (x, i); 398 399 if (y == pc_rtx || y == pic_offset_table_rtx) 400 break; 401 402 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y; 403 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0)) 404 ; 405 406 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label)) 407 break; 408 } 409 410 if (i >= 2) 411 return NULL_RTX; 412 413 x = XEXP (x, 1 - i); 414 415 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x; 416 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) 417 ; 418 } 419 420 /* Strip off any sign or zero extension. 
*/ 421 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND) 422 { 423 x = XEXP (x, 0); 424 425 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x; 426 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) 427 ; 428 } 429 430 /* If X isn't a MEM then this isn't a tablejump we understand. */ 431 if (GET_CODE (x) != MEM) 432 return NULL_RTX; 433 434 /* Strip off the MEM. */ 435 x = XEXP (x, 0); 436 437 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x; 438 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) 439 ; 440 441 /* If X isn't a PLUS than this isn't a tablejump we understand. */ 442 if (GET_CODE (x) != PLUS) 443 return NULL_RTX; 444 445 /* At this point we should have an expression representing the jump table 446 plus an offset. Examine each operand in order to determine which one 447 represents the jump table. Knowing that tells us that the other operand 448 must represent the offset. */ 449 for (i = 0; i < 2; i++) 450 { 451 old_insn = insn; 452 y = XEXP (x, i); 453 454 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y; 455 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0)) 456 ; 457 458 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF) 459 && reg_mentioned_p (label, y)) 460 break; 461 } 462 463 if (i >= 2) 464 return NULL_RTX; 465 466 x = XEXP (x, 1 - i); 467 468 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */ 469 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS) 470 for (i = 0; i < 2; i++) 471 if (XEXP (x, i) == pic_offset_table_rtx) 472 { 473 x = XEXP (x, 1 - i); 474 break; 475 } 476 477 if (earliest) 478 *earliest = insn; 479 480 /* Return the RTL expression representing the offset. */ 481 return x; 482} 483 484/* Return the number of places FIND appears within X. If COUNT_DEST is 485 zero, we do not count occurrences inside the destination of a SET. 
*/ 486 487int 488count_occurrences (x, find, count_dest) 489 rtx x, find; 490 int count_dest; 491{ 492 int i, j; 493 enum rtx_code code; 494 const char *format_ptr; 495 int count; 496 497 if (x == find) 498 return 1; 499 500 code = GET_CODE (x); 501 502 switch (code) 503 { 504 case REG: 505 case CONST_INT: 506 case CONST_DOUBLE: 507 case CONST_VECTOR: 508 case SYMBOL_REF: 509 case CODE_LABEL: 510 case PC: 511 case CC0: 512 return 0; 513 514 case MEM: 515 if (GET_CODE (find) == MEM && rtx_equal_p (x, find)) 516 return 1; 517 break; 518 519 case SET: 520 if (SET_DEST (x) == find && ! count_dest) 521 return count_occurrences (SET_SRC (x), find, count_dest); 522 break; 523 524 default: 525 break; 526 } 527 528 format_ptr = GET_RTX_FORMAT (code); 529 count = 0; 530 531 for (i = 0; i < GET_RTX_LENGTH (code); i++) 532 { 533 switch (*format_ptr++) 534 { 535 case 'e': 536 count += count_occurrences (XEXP (x, i), find, count_dest); 537 break; 538 539 case 'E': 540 for (j = 0; j < XVECLEN (x, i); j++) 541 count += count_occurrences (XVECEXP (x, i, j), find, count_dest); 542 break; 543 } 544 } 545 return count; 546} 547 548/* Nonzero if register REG appears somewhere within IN. 549 Also works if REG is not a register; in this case it checks 550 for a subexpression of IN that is Lisp "equal" to REG. */ 551 552int 553reg_mentioned_p (reg, in) 554 rtx reg, in; 555{ 556 const char *fmt; 557 int i; 558 enum rtx_code code; 559 560 if (in == 0) 561 return 0; 562 563 if (reg == in) 564 return 1; 565 566 if (GET_CODE (in) == LABEL_REF) 567 return reg == XEXP (in, 0); 568 569 code = GET_CODE (in); 570 571 switch (code) 572 { 573 /* Compare registers by number. */ 574 case REG: 575 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg); 576 577 /* These codes have no constituent expressions 578 and are unique. 
*/ 579 case SCRATCH: 580 case CC0: 581 case PC: 582 return 0; 583 584 case CONST_INT: 585 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg); 586 587 case CONST_VECTOR: 588 case CONST_DOUBLE: 589 /* These are kept unique for a given value. */ 590 return 0; 591 592 default: 593 break; 594 } 595 596 if (GET_CODE (reg) == code && rtx_equal_p (reg, in)) 597 return 1; 598 599 fmt = GET_RTX_FORMAT (code); 600 601 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 602 { 603 if (fmt[i] == 'E') 604 { 605 int j; 606 for (j = XVECLEN (in, i) - 1; j >= 0; j--) 607 if (reg_mentioned_p (reg, XVECEXP (in, i, j))) 608 return 1; 609 } 610 else if (fmt[i] == 'e' 611 && reg_mentioned_p (reg, XEXP (in, i))) 612 return 1; 613 } 614 return 0; 615} 616 617/* Return 1 if in between BEG and END, exclusive of BEG and END, there is 618 no CODE_LABEL insn. */ 619 620int 621no_labels_between_p (beg, end) 622 rtx beg, end; 623{ 624 rtx p; 625 if (beg == end) 626 return 0; 627 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p)) 628 if (GET_CODE (p) == CODE_LABEL) 629 return 0; 630 return 1; 631} 632 633/* Return 1 if in between BEG and END, exclusive of BEG and END, there is 634 no JUMP_INSN insn. */ 635 636int 637no_jumps_between_p (beg, end) 638 rtx beg, end; 639{ 640 rtx p; 641 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p)) 642 if (GET_CODE (p) == JUMP_INSN) 643 return 0; 644 return 1; 645} 646 647/* Nonzero if register REG is used in an insn between 648 FROM_INSN and TO_INSN (exclusive of those two). 
*/ 649 650int 651reg_used_between_p (reg, from_insn, to_insn) 652 rtx reg, from_insn, to_insn; 653{ 654 rtx insn; 655 656 if (from_insn == to_insn) 657 return 0; 658 659 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) 660 if (INSN_P (insn) 661 && (reg_overlap_mentioned_p (reg, PATTERN (insn)) 662 || (GET_CODE (insn) == CALL_INSN 663 && (find_reg_fusage (insn, USE, reg) 664 || find_reg_fusage (insn, CLOBBER, reg))))) 665 return 1; 666 return 0; 667} 668 669/* Nonzero if the old value of X, a register, is referenced in BODY. If X 670 is entirely replaced by a new value and the only use is as a SET_DEST, 671 we do not consider it a reference. */ 672 673int 674reg_referenced_p (x, body) 675 rtx x; 676 rtx body; 677{ 678 int i; 679 680 switch (GET_CODE (body)) 681 { 682 case SET: 683 if (reg_overlap_mentioned_p (x, SET_SRC (body))) 684 return 1; 685 686 /* If the destination is anything other than CC0, PC, a REG or a SUBREG 687 of a REG that occupies all of the REG, the insn references X if 688 it is mentioned in the destination. */ 689 if (GET_CODE (SET_DEST (body)) != CC0 690 && GET_CODE (SET_DEST (body)) != PC 691 && GET_CODE (SET_DEST (body)) != REG 692 && ! 
(GET_CODE (SET_DEST (body)) == SUBREG 693 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG 694 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body)))) 695 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) 696 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body))) 697 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) 698 && reg_overlap_mentioned_p (x, SET_DEST (body))) 699 return 1; 700 return 0; 701 702 case ASM_OPERANDS: 703 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) 704 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i))) 705 return 1; 706 return 0; 707 708 case CALL: 709 case USE: 710 case IF_THEN_ELSE: 711 return reg_overlap_mentioned_p (x, body); 712 713 case TRAP_IF: 714 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body)); 715 716 case PREFETCH: 717 return reg_overlap_mentioned_p (x, XEXP (body, 0)); 718 719 case UNSPEC: 720 case UNSPEC_VOLATILE: 721 for (i = XVECLEN (body, 0) - 1; i >= 0; i--) 722 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i))) 723 return 1; 724 return 0; 725 726 case PARALLEL: 727 for (i = XVECLEN (body, 0) - 1; i >= 0; i--) 728 if (reg_referenced_p (x, XVECEXP (body, 0, i))) 729 return 1; 730 return 0; 731 732 case CLOBBER: 733 if (GET_CODE (XEXP (body, 0)) == MEM) 734 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0))) 735 return 1; 736 return 0; 737 738 case COND_EXEC: 739 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body))) 740 return 1; 741 return reg_referenced_p (x, COND_EXEC_CODE (body)); 742 743 default: 744 return 0; 745 } 746} 747 748/* Nonzero if register REG is referenced in an insn between 749 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do 750 not count. 
*/ 751 752int 753reg_referenced_between_p (reg, from_insn, to_insn) 754 rtx reg, from_insn, to_insn; 755{ 756 rtx insn; 757 758 if (from_insn == to_insn) 759 return 0; 760 761 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) 762 if (INSN_P (insn) 763 && (reg_referenced_p (reg, PATTERN (insn)) 764 || (GET_CODE (insn) == CALL_INSN 765 && find_reg_fusage (insn, USE, reg)))) 766 return 1; 767 return 0; 768} 769 770/* Nonzero if register REG is set or clobbered in an insn between 771 FROM_INSN and TO_INSN (exclusive of those two). */ 772 773int 774reg_set_between_p (reg, from_insn, to_insn) 775 rtx reg, from_insn, to_insn; 776{ 777 rtx insn; 778 779 if (from_insn == to_insn) 780 return 0; 781 782 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) 783 if (INSN_P (insn) && reg_set_p (reg, insn)) 784 return 1; 785 return 0; 786} 787 788/* Internals of reg_set_between_p. */ 789int 790reg_set_p (reg, insn) 791 rtx reg, insn; 792{ 793 rtx body = insn; 794 795 /* We can be passed an insn or part of one. If we are passed an insn, 796 check if a side-effect of the insn clobbers REG. */ 797 if (INSN_P (insn)) 798 { 799 if (FIND_REG_INC_NOTE (insn, reg) 800 || (GET_CODE (insn) == CALL_INSN 801 /* We'd like to test call_used_regs here, but rtlanal.c can't 802 reference that variable due to its use in genattrtab. So 803 we'll just be more conservative. 804 805 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE 806 information holds all clobbered registers. */ 807 && ((GET_CODE (reg) == REG 808 && REGNO (reg) < FIRST_PSEUDO_REGISTER) 809 || GET_CODE (reg) == MEM 810 || find_reg_fusage (insn, CLOBBER, reg)))) 811 return 1; 812 813 body = PATTERN (insn); 814 } 815 816 return set_of (reg, insn) != NULL_RTX; 817} 818 819/* Similar to reg_set_between_p, but check all registers in X. Return 0 820 only if none of them are modified between START and END. Do not 821 consider non-registers one way or the other. 
*/ 822 823int 824regs_set_between_p (x, start, end) 825 rtx x; 826 rtx start, end; 827{ 828 enum rtx_code code = GET_CODE (x); 829 const char *fmt; 830 int i, j; 831 832 switch (code) 833 { 834 case CONST_INT: 835 case CONST_DOUBLE: 836 case CONST_VECTOR: 837 case CONST: 838 case SYMBOL_REF: 839 case LABEL_REF: 840 case PC: 841 case CC0: 842 return 0; 843 844 case REG: 845 return reg_set_between_p (x, start, end); 846 847 default: 848 break; 849 } 850 851 fmt = GET_RTX_FORMAT (code); 852 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 853 { 854 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end)) 855 return 1; 856 857 else if (fmt[i] == 'E') 858 for (j = XVECLEN (x, i) - 1; j >= 0; j--) 859 if (regs_set_between_p (XVECEXP (x, i, j), start, end)) 860 return 1; 861 } 862 863 return 0; 864} 865 866/* Similar to reg_set_between_p, but check all registers in X. Return 0 867 only if none of them are modified between START and END. Return 1 if 868 X contains a MEM; this routine does not perform any memory aliasing. */ 869 870int 871modified_between_p (x, start, end) 872 rtx x; 873 rtx start, end; 874{ 875 enum rtx_code code = GET_CODE (x); 876 const char *fmt; 877 int i, j; 878 879 switch (code) 880 { 881 case CONST_INT: 882 case CONST_DOUBLE: 883 case CONST_VECTOR: 884 case CONST: 885 case SYMBOL_REF: 886 case LABEL_REF: 887 return 0; 888 889 case PC: 890 case CC0: 891 return 1; 892 893 case MEM: 894 /* If the memory is not constant, assume it is modified. If it is 895 constant, we still have to check the address. */ 896 if (! 
RTX_UNCHANGING_P (x)) 897 return 1; 898 break; 899 900 case REG: 901 return reg_set_between_p (x, start, end); 902 903 default: 904 break; 905 } 906 907 fmt = GET_RTX_FORMAT (code); 908 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 909 { 910 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end)) 911 return 1; 912 913 else if (fmt[i] == 'E') 914 for (j = XVECLEN (x, i) - 1; j >= 0; j--) 915 if (modified_between_p (XVECEXP (x, i, j), start, end)) 916 return 1; 917 } 918 919 return 0; 920} 921 922/* Similar to reg_set_p, but check all registers in X. Return 0 only if none 923 of them are modified in INSN. Return 1 if X contains a MEM; this routine 924 does not perform any memory aliasing. */ 925 926int 927modified_in_p (x, insn) 928 rtx x; 929 rtx insn; 930{ 931 enum rtx_code code = GET_CODE (x); 932 const char *fmt; 933 int i, j; 934 935 switch (code) 936 { 937 case CONST_INT: 938 case CONST_DOUBLE: 939 case CONST_VECTOR: 940 case CONST: 941 case SYMBOL_REF: 942 case LABEL_REF: 943 return 0; 944 945 case PC: 946 case CC0: 947 return 1; 948 949 case MEM: 950 /* If the memory is not constant, assume it is modified. If it is 951 constant, we still have to check the address. */ 952 if (! RTX_UNCHANGING_P (x)) 953 return 1; 954 break; 955 956 case REG: 957 return reg_set_p (x, insn); 958 959 default: 960 break; 961 } 962 963 fmt = GET_RTX_FORMAT (code); 964 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 965 { 966 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn)) 967 return 1; 968 969 else if (fmt[i] == 'E') 970 for (j = XVECLEN (x, i) - 1; j >= 0; j--) 971 if (modified_in_p (XVECEXP (x, i, j), insn)) 972 return 1; 973 } 974 975 return 0; 976} 977 978/* Return true if anything in insn X is (anti,output,true) dependent on 979 anything in insn Y. */ 980 981int 982insn_dependent_p (x, y) 983 rtx x, y; 984{ 985 rtx tmp; 986 987 if (! INSN_P (x) || ! 
INSN_P (y)) 988 abort (); 989 990 tmp = PATTERN (y); 991 note_stores (PATTERN (x), insn_dependent_p_1, &tmp); 992 if (tmp == NULL_RTX) 993 return 1; 994 995 tmp = PATTERN (x); 996 note_stores (PATTERN (y), insn_dependent_p_1, &tmp); 997 if (tmp == NULL_RTX) 998 return 1; 999 1000 return 0; 1001} 1002 1003/* A helper routine for insn_dependent_p called through note_stores. */ 1004 1005static void 1006insn_dependent_p_1 (x, pat, data) 1007 rtx x; 1008 rtx pat ATTRIBUTE_UNUSED; 1009 void *data; 1010{ 1011 rtx * pinsn = (rtx *) data; 1012 1013 if (*pinsn && reg_mentioned_p (x, *pinsn)) 1014 *pinsn = NULL_RTX; 1015} 1016 1017/* Helper function for set_of. */ 1018struct set_of_data 1019 { 1020 rtx found; 1021 rtx pat; 1022 }; 1023 1024static void 1025set_of_1 (x, pat, data1) 1026 rtx x; 1027 rtx pat; 1028 void *data1; 1029{ 1030 struct set_of_data *data = (struct set_of_data *) (data1); 1031 if (rtx_equal_p (x, data->pat) 1032 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x))) 1033 data->found = pat; 1034} 1035 1036/* Give an INSN, return a SET or CLOBBER expression that does modify PAT 1037 (either directly or via STRICT_LOW_PART and similar modifiers). */ 1038rtx 1039set_of (pat, insn) 1040 rtx pat, insn; 1041{ 1042 struct set_of_data data; 1043 data.found = NULL_RTX; 1044 data.pat = pat; 1045 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data); 1046 return data.found; 1047} 1048 1049/* Given an INSN, return a SET expression if this insn has only a single SET. 1050 It may also have CLOBBERs, USEs, or SET whose output 1051 will not be used, which we ignore. 
*/ 1052 1053rtx 1054single_set_2 (insn, pat) 1055 rtx insn, pat; 1056{ 1057 rtx set = NULL; 1058 int set_verified = 1; 1059 int i; 1060 1061 if (GET_CODE (pat) == PARALLEL) 1062 { 1063 for (i = 0; i < XVECLEN (pat, 0); i++) 1064 { 1065 rtx sub = XVECEXP (pat, 0, i); 1066 switch (GET_CODE (sub)) 1067 { 1068 case USE: 1069 case CLOBBER: 1070 break; 1071 1072 case SET: 1073 /* We can consider insns having multiple sets, where all 1074 but one are dead as single set insns. In common case 1075 only single set is present in the pattern so we want 1076 to avoid checking for REG_UNUSED notes unless necessary. 1077 1078 When we reach set first time, we just expect this is 1079 the single set we are looking for and only when more 1080 sets are found in the insn, we check them. */ 1081 if (!set_verified) 1082 { 1083 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set)) 1084 && !side_effects_p (set)) 1085 set = NULL; 1086 else 1087 set_verified = 1; 1088 } 1089 if (!set) 1090 set = sub, set_verified = 0; 1091 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub)) 1092 || side_effects_p (sub)) 1093 return NULL_RTX; 1094 break; 1095 1096 default: 1097 return NULL_RTX; 1098 } 1099 } 1100 } 1101 return set; 1102} 1103 1104/* Given an INSN, return nonzero if it has more than one SET, else return 1105 zero. */ 1106 1107int 1108multiple_sets (insn) 1109 rtx insn; 1110{ 1111 int found; 1112 int i; 1113 1114 /* INSN must be an insn. */ 1115 if (! INSN_P (insn)) 1116 return 0; 1117 1118 /* Only a PARALLEL can have multiple SETs. */ 1119 if (GET_CODE (PATTERN (insn)) == PARALLEL) 1120 { 1121 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++) 1122 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET) 1123 { 1124 /* If we have already found a SET, then return now. */ 1125 if (found) 1126 return 1; 1127 else 1128 found = 1; 1129 } 1130 } 1131 1132 /* Either zero or one SET. 
*/ 1133 return 0; 1134} 1135 1136/* Return nonzero if the destination of SET equals the source 1137 and there are no side effects. */ 1138 1139int 1140set_noop_p (set) 1141 rtx set; 1142{ 1143 rtx src = SET_SRC (set); 1144 rtx dst = SET_DEST (set); 1145 1146 if (side_effects_p (src) || side_effects_p (dst)) 1147 return 0; 1148 1149 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM) 1150 return rtx_equal_p (dst, src); 1151 1152 if (dst == pc_rtx && src == pc_rtx) 1153 return 1; 1154 1155 if (GET_CODE (dst) == SIGN_EXTRACT 1156 || GET_CODE (dst) == ZERO_EXTRACT) 1157 return rtx_equal_p (XEXP (dst, 0), src) 1158 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx; 1159 1160 if (GET_CODE (dst) == STRICT_LOW_PART) 1161 dst = XEXP (dst, 0); 1162 1163 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG) 1164 { 1165 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst)) 1166 return 0; 1167 src = SUBREG_REG (src); 1168 dst = SUBREG_REG (dst); 1169 } 1170 1171 return (GET_CODE (src) == REG && GET_CODE (dst) == REG 1172 && REGNO (src) == REGNO (dst)); 1173} 1174 1175/* Return nonzero if an insn consists only of SETs, each of which only sets a 1176 value to itself. */ 1177 1178int 1179noop_move_p (insn) 1180 rtx insn; 1181{ 1182 rtx pat = PATTERN (insn); 1183 1184 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE) 1185 return 1; 1186 1187 /* Insns carrying these notes are useful later on. */ 1188 if (find_reg_note (insn, REG_EQUAL, NULL_RTX)) 1189 return 0; 1190 1191 /* For now treat an insn with a REG_RETVAL note as a 1192 a special insn which should not be considered a no-op. */ 1193 if (find_reg_note (insn, REG_RETVAL, NULL_RTX)) 1194 return 0; 1195 1196 if (GET_CODE (pat) == SET && set_noop_p (pat)) 1197 return 1; 1198 1199 if (GET_CODE (pat) == PARALLEL) 1200 { 1201 int i; 1202 /* If nothing but SETs of registers to themselves, 1203 this insn can also be deleted. 
*/ 1204 for (i = 0; i < XVECLEN (pat, 0); i++) 1205 { 1206 rtx tem = XVECEXP (pat, 0, i); 1207 1208 if (GET_CODE (tem) == USE 1209 || GET_CODE (tem) == CLOBBER) 1210 continue; 1211 1212 if (GET_CODE (tem) != SET || ! set_noop_p (tem)) 1213 return 0; 1214 } 1215 1216 return 1; 1217 } 1218 return 0; 1219} 1220 1221 1222/* Return the last thing that X was assigned from before *PINSN. If VALID_TO 1223 is not NULL_RTX then verify that the object is not modified up to VALID_TO. 1224 If the object was modified, if we hit a partial assignment to X, or hit a 1225 CODE_LABEL first, return X. If we found an assignment, update *PINSN to 1226 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to 1227 be the src. */ 1228 1229rtx 1230find_last_value (x, pinsn, valid_to, allow_hwreg) 1231 rtx x; 1232 rtx *pinsn; 1233 rtx valid_to; 1234 int allow_hwreg; 1235{ 1236 rtx p; 1237 1238 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL; 1239 p = PREV_INSN (p)) 1240 if (INSN_P (p)) 1241 { 1242 rtx set = single_set (p); 1243 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX); 1244 1245 if (set && rtx_equal_p (x, SET_DEST (set))) 1246 { 1247 rtx src = SET_SRC (set); 1248 1249 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST) 1250 src = XEXP (note, 0); 1251 1252 if ((valid_to == NULL_RTX 1253 || ! modified_between_p (src, PREV_INSN (p), valid_to)) 1254 /* Reject hard registers because we don't usually want 1255 to use them; we'd rather use a pseudo. */ 1256 && (! (GET_CODE (src) == REG 1257 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg)) 1258 { 1259 *pinsn = p; 1260 return src; 1261 } 1262 } 1263 1264 /* If set in non-simple way, we don't have a value. */ 1265 if (reg_set_p (x, p)) 1266 break; 1267 } 1268 1269 return x; 1270} 1271 1272/* Return nonzero if register in range [REGNO, ENDREGNO) 1273 appears either explicitly or implicitly in X 1274 other than being stored into. 1275 1276 References contained within the substructure at LOC do not count. 
1277 LOC may be zero, meaning don't ignore anything. */ 1278 1279int 1280refers_to_regno_p (regno, endregno, x, loc) 1281 unsigned int regno, endregno; 1282 rtx x; 1283 rtx *loc; 1284{ 1285 int i; 1286 unsigned int x_regno; 1287 RTX_CODE code; 1288 const char *fmt; 1289 1290 repeat: 1291 /* The contents of a REG_NONNEG note is always zero, so we must come here 1292 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */ 1293 if (x == 0) 1294 return 0; 1295 1296 code = GET_CODE (x); 1297 1298 switch (code) 1299 { 1300 case REG: 1301 x_regno = REGNO (x); 1302 1303 /* If we modifying the stack, frame, or argument pointer, it will 1304 clobber a virtual register. In fact, we could be more precise, 1305 but it isn't worth it. */ 1306 if ((x_regno == STACK_POINTER_REGNUM 1307#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 1308 || x_regno == ARG_POINTER_REGNUM 1309#endif 1310 || x_regno == FRAME_POINTER_REGNUM) 1311 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER) 1312 return 1; 1313 1314 return (endregno > x_regno 1315 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER 1316 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x)) 1317 : 1)); 1318 1319 case SUBREG: 1320 /* If this is a SUBREG of a hard reg, we can see exactly which 1321 registers are being modified. Otherwise, handle normally. */ 1322 if (GET_CODE (SUBREG_REG (x)) == REG 1323 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) 1324 { 1325 unsigned int inner_regno = subreg_regno (x); 1326 unsigned int inner_endregno 1327 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER 1328 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); 1329 1330 return endregno > inner_regno && regno < inner_endregno; 1331 } 1332 break; 1333 1334 case CLOBBER: 1335 case SET: 1336 if (&SET_DEST (x) != loc 1337 /* Note setting a SUBREG counts as referring to the REG it is in for 1338 a pseudo but not for hard registers since we can 1339 treat each word individually. 
*/ 1340 && ((GET_CODE (SET_DEST (x)) == SUBREG 1341 && loc != &SUBREG_REG (SET_DEST (x)) 1342 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG 1343 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER 1344 && refers_to_regno_p (regno, endregno, 1345 SUBREG_REG (SET_DEST (x)), loc)) 1346 || (GET_CODE (SET_DEST (x)) != REG 1347 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc)))) 1348 return 1; 1349 1350 if (code == CLOBBER || loc == &SET_SRC (x)) 1351 return 0; 1352 x = SET_SRC (x); 1353 goto repeat; 1354 1355 default: 1356 break; 1357 } 1358 1359 /* X does not match, so try its subexpressions. */ 1360 1361 fmt = GET_RTX_FORMAT (code); 1362 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 1363 { 1364 if (fmt[i] == 'e' && loc != &XEXP (x, i)) 1365 { 1366 if (i == 0) 1367 { 1368 x = XEXP (x, 0); 1369 goto repeat; 1370 } 1371 else 1372 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc)) 1373 return 1; 1374 } 1375 else if (fmt[i] == 'E') 1376 { 1377 int j; 1378 for (j = XVECLEN (x, i) - 1; j >=0; j--) 1379 if (loc != &XVECEXP (x, i, j) 1380 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc)) 1381 return 1; 1382 } 1383 } 1384 return 0; 1385} 1386 1387/* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, 1388 we check if any register number in X conflicts with the relevant register 1389 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN 1390 contains a MEM (we don't bother checking for memory addresses that can't 1391 conflict because we expect this to be a rare case. */ 1392 1393int 1394reg_overlap_mentioned_p (x, in) 1395 rtx x, in; 1396{ 1397 unsigned int regno, endregno; 1398 1399 /* Overly conservative. */ 1400 if (GET_CODE (x) == STRICT_LOW_PART) 1401 x = XEXP (x, 0); 1402 1403 /* If either argument is a constant, then modifying X can not affect IN. 
*/ 1404 if (CONSTANT_P (x) || CONSTANT_P (in)) 1405 return 0; 1406 1407 switch (GET_CODE (x)) 1408 { 1409 case SUBREG: 1410 regno = REGNO (SUBREG_REG (x)); 1411 if (regno < FIRST_PSEUDO_REGISTER) 1412 regno = subreg_regno (x); 1413 goto do_reg; 1414 1415 case REG: 1416 regno = REGNO (x); 1417 do_reg: 1418 endregno = regno + (regno < FIRST_PSEUDO_REGISTER 1419 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); 1420 return refers_to_regno_p (regno, endregno, in, (rtx*) 0); 1421 1422 case MEM: 1423 { 1424 const char *fmt; 1425 int i; 1426 1427 if (GET_CODE (in) == MEM) 1428 return 1; 1429 1430 fmt = GET_RTX_FORMAT (GET_CODE (in)); 1431 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--) 1432 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i))) 1433 return 1; 1434 1435 return 0; 1436 } 1437 1438 case SCRATCH: 1439 case PC: 1440 case CC0: 1441 return reg_mentioned_p (x, in); 1442 1443 case PARALLEL: 1444 { 1445 int i; 1446 1447 /* If any register in here refers to it we return true. */ 1448 for (i = XVECLEN (x, 0) - 1; i >= 0; i--) 1449 if (XEXP (XVECEXP (x, 0, i), 0) != 0 1450 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in)) 1451 return 1; 1452 return 0; 1453 } 1454 1455 default: 1456 break; 1457 } 1458 1459 abort (); 1460} 1461 1462/* Return the last value to which REG was set prior to INSN. If we can't 1463 find it easily, return 0. 1464 1465 We only return a REG, SUBREG, or constant because it is too hard to 1466 check if a MEM remains unchanged. */ 1467 1468rtx 1469reg_set_last (x, insn) 1470 rtx x; 1471 rtx insn; 1472{ 1473 rtx orig_insn = insn; 1474 1475 /* Scan backwards until reg_set_last_1 changed one of the above flags. 1476 Stop when we reach a label or X is a hard reg and we reach a 1477 CALL_INSN (if reg_set_last_last_regno is a hard reg). 1478 1479 If we find a set of X, ensure that its SET_SRC remains unchanged. 
*/ 1480 1481 /* We compare with <= here, because reg_set_last_last_regno 1482 is actually the number of the first reg *not* in X. */ 1483 for (; 1484 insn && GET_CODE (insn) != CODE_LABEL 1485 && ! (GET_CODE (insn) == CALL_INSN 1486 && REGNO (x) <= FIRST_PSEUDO_REGISTER); 1487 insn = PREV_INSN (insn)) 1488 if (INSN_P (insn)) 1489 { 1490 rtx set = set_of (x, insn); 1491 /* OK, this function modify our register. See if we understand it. */ 1492 if (set) 1493 { 1494 rtx last_value; 1495 if (GET_CODE (set) != SET || SET_DEST (set) != x) 1496 return 0; 1497 last_value = SET_SRC (x); 1498 if (CONSTANT_P (last_value) 1499 || ((GET_CODE (last_value) == REG 1500 || GET_CODE (last_value) == SUBREG) 1501 && ! reg_set_between_p (last_value, 1502 insn, orig_insn))) 1503 return last_value; 1504 else 1505 return 0; 1506 } 1507 } 1508 1509 return 0; 1510} 1511 1512/* Call FUN on each register or MEM that is stored into or clobbered by X. 1513 (X would be the pattern of an insn). 1514 FUN receives two arguments: 1515 the REG, MEM, CC0 or PC being stored in or clobbered, 1516 the SET or CLOBBER rtx that does the store. 1517 1518 If the item being stored in or clobbered is a SUBREG of a hard register, 1519 the SUBREG will be passed. */ 1520 1521void 1522note_stores (x, fun, data) 1523 rtx x; 1524 void (*fun) PARAMS ((rtx, rtx, void *)); 1525 void *data; 1526{ 1527 int i; 1528 1529 if (GET_CODE (x) == COND_EXEC) 1530 x = COND_EXEC_CODE (x); 1531 1532 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) 1533 { 1534 rtx dest = SET_DEST (x); 1535 1536 while ((GET_CODE (dest) == SUBREG 1537 && (GET_CODE (SUBREG_REG (dest)) != REG 1538 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER)) 1539 || GET_CODE (dest) == ZERO_EXTRACT 1540 || GET_CODE (dest) == SIGN_EXTRACT 1541 || GET_CODE (dest) == STRICT_LOW_PART) 1542 dest = XEXP (dest, 0); 1543 1544 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, 1545 each of whose first operand is a register. 
 */
      if (GET_CODE (dest) == PARALLEL)
	{
	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
	}
      else
	(*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}

/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (pbody, fun, data)
     rtx *pbody;
     void (*fun) PARAMS ((rtx *, void *));
     void *data;
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      /* Both the condition and the conditionalized body are uses.  */
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	(*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      /* A clobbered MEM uses its address.  */
      if (GET_CODE (XEXP (body, 0)) == MEM)
	(*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
	rtx dest = SET_DEST (body);

	/* For sets we replace everything in source plus registers in memory
	   expression in store and operands of a ZERO_EXTRACT.  */
	(*fun) (&SET_SRC (body), data);

	if (GET_CODE (dest) == ZERO_EXTRACT)
	  {
	    (*fun) (&XEXP (dest, 1), data);
	    (*fun) (&XEXP (dest, 2), data);
	  }

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
	  dest = XEXP (dest, 0);

	/* Storing into a MEM uses the address.  */
	if (GET_CODE (dest) == MEM)
	  (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}

/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (insn, x)
     rtx insn;
     rtx x;
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  if (GET_CODE (x) != REG)
    abort ();

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ?
regno 1682 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1); 1683 1684 for (i = regno; i <= last_regno; i++) 1685 if (! dead_or_set_regno_p (insn, i)) 1686 return 0; 1687 1688 return 1; 1689} 1690 1691/* Utility function for dead_or_set_p to check an individual register. Also 1692 called from flow.c. */ 1693 1694int 1695dead_or_set_regno_p (insn, test_regno) 1696 rtx insn; 1697 unsigned int test_regno; 1698{ 1699 unsigned int regno, endregno; 1700 rtx pattern; 1701 1702 /* See if there is a death note for something that includes TEST_REGNO. */ 1703 if (find_regno_note (insn, REG_DEAD, test_regno)) 1704 return 1; 1705 1706 if (GET_CODE (insn) == CALL_INSN 1707 && find_regno_fusage (insn, CLOBBER, test_regno)) 1708 return 1; 1709 1710 pattern = PATTERN (insn); 1711 1712 if (GET_CODE (pattern) == COND_EXEC) 1713 pattern = COND_EXEC_CODE (pattern); 1714 1715 if (GET_CODE (pattern) == SET) 1716 { 1717 rtx dest = SET_DEST (PATTERN (insn)); 1718 1719 /* A value is totally replaced if it is the destination or the 1720 destination is a SUBREG of REGNO that does not change the number of 1721 words in it. */ 1722 if (GET_CODE (dest) == SUBREG 1723 && (((GET_MODE_SIZE (GET_MODE (dest)) 1724 + UNITS_PER_WORD - 1) / UNITS_PER_WORD) 1725 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) 1726 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) 1727 dest = SUBREG_REG (dest); 1728 1729 if (GET_CODE (dest) != REG) 1730 return 0; 1731 1732 regno = REGNO (dest); 1733 endregno = (regno >= FIRST_PSEUDO_REGISTER ? 
regno + 1 1734 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest))); 1735 1736 return (test_regno >= regno && test_regno < endregno); 1737 } 1738 else if (GET_CODE (pattern) == PARALLEL) 1739 { 1740 int i; 1741 1742 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--) 1743 { 1744 rtx body = XVECEXP (pattern, 0, i); 1745 1746 if (GET_CODE (body) == COND_EXEC) 1747 body = COND_EXEC_CODE (body); 1748 1749 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) 1750 { 1751 rtx dest = SET_DEST (body); 1752 1753 if (GET_CODE (dest) == SUBREG 1754 && (((GET_MODE_SIZE (GET_MODE (dest)) 1755 + UNITS_PER_WORD - 1) / UNITS_PER_WORD) 1756 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) 1757 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) 1758 dest = SUBREG_REG (dest); 1759 1760 if (GET_CODE (dest) != REG) 1761 continue; 1762 1763 regno = REGNO (dest); 1764 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1 1765 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest))); 1766 1767 if (test_regno >= regno && test_regno < endregno) 1768 return 1; 1769 } 1770 } 1771 } 1772 1773 return 0; 1774} 1775 1776/* Return the reg-note of kind KIND in insn INSN, if there is one. 1777 If DATUM is nonzero, look for one whose datum is DATUM. */ 1778 1779rtx 1780find_reg_note (insn, kind, datum) 1781 rtx insn; 1782 enum reg_note kind; 1783 rtx datum; 1784{ 1785 rtx link; 1786 1787 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ 1788 if (! INSN_P (insn)) 1789 return 0; 1790 1791 for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) 1792 if (REG_NOTE_KIND (link) == kind 1793 && (datum == 0 || datum == XEXP (link, 0))) 1794 return link; 1795 return 0; 1796} 1797 1798/* Return the reg-note of kind KIND in insn INSN which applies to register 1799 number REGNO, if any. Return 0 if there is no such reg-note. Note that 1800 the REGNO of this NOTE need not be REGNO if REGNO is a hard register; 1801 it might be the case that the note overlaps REGNO. 
*/ 1802 1803rtx 1804find_regno_note (insn, kind, regno) 1805 rtx insn; 1806 enum reg_note kind; 1807 unsigned int regno; 1808{ 1809 rtx link; 1810 1811 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ 1812 if (! INSN_P (insn)) 1813 return 0; 1814 1815 for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) 1816 if (REG_NOTE_KIND (link) == kind 1817 /* Verify that it is a register, so that scratch and MEM won't cause a 1818 problem here. */ 1819 && GET_CODE (XEXP (link, 0)) == REG 1820 && REGNO (XEXP (link, 0)) <= regno 1821 && ((REGNO (XEXP (link, 0)) 1822 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1 1823 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)), 1824 GET_MODE (XEXP (link, 0))))) 1825 > regno)) 1826 return link; 1827 return 0; 1828} 1829 1830/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and 1831 has such a note. */ 1832 1833rtx 1834find_reg_equal_equiv_note (insn) 1835 rtx insn; 1836{ 1837 rtx note; 1838 1839 if (single_set (insn) == 0) 1840 return 0; 1841 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0) 1842 return note; 1843 else 1844 return find_reg_note (insn, REG_EQUAL, NULL_RTX); 1845} 1846 1847/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found 1848 in the CALL_INSN_FUNCTION_USAGE information of INSN. */ 1849 1850int 1851find_reg_fusage (insn, code, datum) 1852 rtx insn; 1853 enum rtx_code code; 1854 rtx datum; 1855{ 1856 /* If it's not a CALL_INSN, it can't possibly have a 1857 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */ 1858 if (GET_CODE (insn) != CALL_INSN) 1859 return 0; 1860 1861 if (! 
datum) 1862 abort (); 1863 1864 if (GET_CODE (datum) != REG) 1865 { 1866 rtx link; 1867 1868 for (link = CALL_INSN_FUNCTION_USAGE (insn); 1869 link; 1870 link = XEXP (link, 1)) 1871 if (GET_CODE (XEXP (link, 0)) == code 1872 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0))) 1873 return 1; 1874 } 1875 else 1876 { 1877 unsigned int regno = REGNO (datum); 1878 1879 /* CALL_INSN_FUNCTION_USAGE information cannot contain references 1880 to pseudo registers, so don't bother checking. */ 1881 1882 if (regno < FIRST_PSEUDO_REGISTER) 1883 { 1884 unsigned int end_regno 1885 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum)); 1886 unsigned int i; 1887 1888 for (i = regno; i < end_regno; i++) 1889 if (find_regno_fusage (insn, code, i)) 1890 return 1; 1891 } 1892 } 1893 1894 return 0; 1895} 1896 1897/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found 1898 in the CALL_INSN_FUNCTION_USAGE information of INSN. */ 1899 1900int 1901find_regno_fusage (insn, code, regno) 1902 rtx insn; 1903 enum rtx_code code; 1904 unsigned int regno; 1905{ 1906 rtx link; 1907 1908 /* CALL_INSN_FUNCTION_USAGE information cannot contain references 1909 to pseudo registers, so don't bother checking. */ 1910 1911 if (regno >= FIRST_PSEUDO_REGISTER 1912 || GET_CODE (insn) != CALL_INSN ) 1913 return 0; 1914 1915 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) 1916 { 1917 unsigned int regnote; 1918 rtx op, reg; 1919 1920 if (GET_CODE (op = XEXP (link, 0)) == code 1921 && GET_CODE (reg = XEXP (op, 0)) == REG 1922 && (regnote = REGNO (reg)) <= regno 1923 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno) 1924 return 1; 1925 } 1926 1927 return 0; 1928} 1929 1930/* Return true if INSN is a call to a pure function. */ 1931 1932int 1933pure_call_p (insn) 1934 rtx insn; 1935{ 1936 rtx link; 1937 1938 if (GET_CODE (insn) != CALL_INSN || ! 
CONST_OR_PURE_CALL_P (insn)) 1939 return 0; 1940 1941 /* Look for the note that differentiates const and pure functions. */ 1942 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) 1943 { 1944 rtx u, m; 1945 1946 if (GET_CODE (u = XEXP (link, 0)) == USE 1947 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode 1948 && GET_CODE (XEXP (m, 0)) == SCRATCH) 1949 return 1; 1950 } 1951 1952 return 0; 1953} 1954 1955/* Remove register note NOTE from the REG_NOTES of INSN. */ 1956 1957void 1958remove_note (insn, note) 1959 rtx insn; 1960 rtx note; 1961{ 1962 rtx link; 1963 1964 if (note == NULL_RTX) 1965 return; 1966 1967 if (REG_NOTES (insn) == note) 1968 { 1969 REG_NOTES (insn) = XEXP (note, 1); 1970 return; 1971 } 1972 1973 for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) 1974 if (XEXP (link, 1) == note) 1975 { 1976 XEXP (link, 1) = XEXP (note, 1); 1977 return; 1978 } 1979 1980 abort (); 1981} 1982 1983/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and 1984 return 1 if it is found. A simple equality test is used to determine if 1985 NODE matches. */ 1986 1987int 1988in_expr_list_p (listp, node) 1989 rtx listp; 1990 rtx node; 1991{ 1992 rtx x; 1993 1994 for (x = listp; x; x = XEXP (x, 1)) 1995 if (node == XEXP (x, 0)) 1996 return 1; 1997 1998 return 0; 1999} 2000 2001/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and 2002 remove that entry from the list if it is found. 2003 2004 A simple equality test is used to determine if NODE matches. */ 2005 2006void 2007remove_node_from_expr_list (node, listp) 2008 rtx node; 2009 rtx *listp; 2010{ 2011 rtx temp = *listp; 2012 rtx prev = NULL_RTX; 2013 2014 while (temp) 2015 { 2016 if (node == XEXP (temp, 0)) 2017 { 2018 /* Splice the node out of the list. 
*/ 2019 if (prev) 2020 XEXP (prev, 1) = XEXP (temp, 1); 2021 else 2022 *listp = XEXP (temp, 1); 2023 2024 return; 2025 } 2026 2027 prev = temp; 2028 temp = XEXP (temp, 1); 2029 } 2030} 2031 2032/* Nonzero if X contains any volatile instructions. These are instructions 2033 which may cause unpredictable machine state instructions, and thus no 2034 instructions should be moved or combined across them. This includes 2035 only volatile asms and UNSPEC_VOLATILE instructions. */ 2036 2037int 2038volatile_insn_p (x) 2039 rtx x; 2040{ 2041 RTX_CODE code; 2042 2043 code = GET_CODE (x); 2044 switch (code) 2045 { 2046 case LABEL_REF: 2047 case SYMBOL_REF: 2048 case CONST_INT: 2049 case CONST: 2050 case CONST_DOUBLE: 2051 case CONST_VECTOR: 2052 case CC0: 2053 case PC: 2054 case REG: 2055 case SCRATCH: 2056 case CLOBBER: 2057 case ASM_INPUT: 2058 case ADDR_VEC: 2059 case ADDR_DIFF_VEC: 2060 case CALL: 2061 case MEM: 2062 return 0; 2063 2064 case UNSPEC_VOLATILE: 2065 /* case TRAP_IF: This isn't clear yet. */ 2066 return 1; 2067 2068 case ASM_OPERANDS: 2069 if (MEM_VOLATILE_P (x)) 2070 return 1; 2071 2072 default: 2073 break; 2074 } 2075 2076 /* Recursively scan the operands of this expression. */ 2077 2078 { 2079 const char *fmt = GET_RTX_FORMAT (code); 2080 int i; 2081 2082 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 2083 { 2084 if (fmt[i] == 'e') 2085 { 2086 if (volatile_insn_p (XEXP (x, i))) 2087 return 1; 2088 } 2089 else if (fmt[i] == 'E') 2090 { 2091 int j; 2092 for (j = 0; j < XVECLEN (x, i); j++) 2093 if (volatile_insn_p (XVECEXP (x, i, j))) 2094 return 1; 2095 } 2096 } 2097 } 2098 return 0; 2099} 2100 2101/* Nonzero if X contains any volatile memory references 2102 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. 
*/ 2103 2104int 2105volatile_refs_p (x) 2106 rtx x; 2107{ 2108 RTX_CODE code; 2109 2110 code = GET_CODE (x); 2111 switch (code) 2112 { 2113 case LABEL_REF: 2114 case SYMBOL_REF: 2115 case CONST_INT: 2116 case CONST: 2117 case CONST_DOUBLE: 2118 case CONST_VECTOR: 2119 case CC0: 2120 case PC: 2121 case REG: 2122 case SCRATCH: 2123 case CLOBBER: 2124 case ASM_INPUT: 2125 case ADDR_VEC: 2126 case ADDR_DIFF_VEC: 2127 return 0; 2128 2129 case CALL: 2130 case UNSPEC_VOLATILE: 2131 /* case TRAP_IF: This isn't clear yet. */ 2132 return 1; 2133 2134 case MEM: 2135 case ASM_OPERANDS: 2136 if (MEM_VOLATILE_P (x)) 2137 return 1; 2138 2139 default: 2140 break; 2141 } 2142 2143 /* Recursively scan the operands of this expression. */ 2144 2145 { 2146 const char *fmt = GET_RTX_FORMAT (code); 2147 int i; 2148 2149 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 2150 { 2151 if (fmt[i] == 'e') 2152 { 2153 if (volatile_refs_p (XEXP (x, i))) 2154 return 1; 2155 } 2156 else if (fmt[i] == 'E') 2157 { 2158 int j; 2159 for (j = 0; j < XVECLEN (x, i); j++) 2160 if (volatile_refs_p (XVECEXP (x, i, j))) 2161 return 1; 2162 } 2163 } 2164 } 2165 return 0; 2166} 2167 2168/* Similar to above, except that it also rejects register pre- and post- 2169 incrementing. */ 2170 2171int 2172side_effects_p (x) 2173 rtx x; 2174{ 2175 RTX_CODE code; 2176 2177 code = GET_CODE (x); 2178 switch (code) 2179 { 2180 case LABEL_REF: 2181 case SYMBOL_REF: 2182 case CONST_INT: 2183 case CONST: 2184 case CONST_DOUBLE: 2185 case CONST_VECTOR: 2186 case CC0: 2187 case PC: 2188 case REG: 2189 case SCRATCH: 2190 case ASM_INPUT: 2191 case ADDR_VEC: 2192 case ADDR_DIFF_VEC: 2193 return 0; 2194 2195 case CLOBBER: 2196 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c 2197 when some combination can't be done. If we see one, don't think 2198 that we can simplify the expression. 
*/ 2199 return (GET_MODE (x) != VOIDmode); 2200 2201 case PRE_INC: 2202 case PRE_DEC: 2203 case POST_INC: 2204 case POST_DEC: 2205 case PRE_MODIFY: 2206 case POST_MODIFY: 2207 case CALL: 2208 case UNSPEC_VOLATILE: 2209 /* case TRAP_IF: This isn't clear yet. */ 2210 return 1; 2211 2212 case MEM: 2213 case ASM_OPERANDS: 2214 if (MEM_VOLATILE_P (x)) 2215 return 1; 2216 2217 default: 2218 break; 2219 } 2220 2221 /* Recursively scan the operands of this expression. */ 2222 2223 { 2224 const char *fmt = GET_RTX_FORMAT (code); 2225 int i; 2226 2227 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 2228 { 2229 if (fmt[i] == 'e') 2230 { 2231 if (side_effects_p (XEXP (x, i))) 2232 return 1; 2233 } 2234 else if (fmt[i] == 'E') 2235 { 2236 int j; 2237 for (j = 0; j < XVECLEN (x, i); j++) 2238 if (side_effects_p (XVECEXP (x, i, j))) 2239 return 1; 2240 } 2241 } 2242 } 2243 return 0; 2244} 2245 2246/* Return nonzero if evaluating rtx X might cause a trap. */ 2247 2248int 2249may_trap_p (x) 2250 rtx x; 2251{ 2252 int i; 2253 enum rtx_code code; 2254 const char *fmt; 2255 2256 if (x == 0) 2257 return 0; 2258 code = GET_CODE (x); 2259 switch (code) 2260 { 2261 /* Handle these cases quickly. */ 2262 case CONST_INT: 2263 case CONST_DOUBLE: 2264 case CONST_VECTOR: 2265 case SYMBOL_REF: 2266 case LABEL_REF: 2267 case CONST: 2268 case PC: 2269 case CC0: 2270 case REG: 2271 case SCRATCH: 2272 return 0; 2273 2274 case ASM_INPUT: 2275 case UNSPEC_VOLATILE: 2276 case TRAP_IF: 2277 return 1; 2278 2279 case ASM_OPERANDS: 2280 return MEM_VOLATILE_P (x); 2281 2282 /* Memory ref can trap unless it's a static var or a stack slot. */ 2283 case MEM: 2284 return rtx_addr_can_trap_p (XEXP (x, 0)); 2285 2286 /* Division by a non-constant might trap. */ 2287 case DIV: 2288 case MOD: 2289 case UDIV: 2290 case UMOD: 2291 if (! 
CONSTANT_P (XEXP (x, 1)) 2292 || GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) 2293 return 1; 2294 /* This was const0_rtx, but by not using that, 2295 we can link this file into other programs. */ 2296 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0) 2297 return 1; 2298 break; 2299 2300 case EXPR_LIST: 2301 /* An EXPR_LIST is used to represent a function call. This 2302 certainly may trap. */ 2303 return 1; 2304 2305 case GE: 2306 case GT: 2307 case LE: 2308 case LT: 2309 case COMPARE: 2310 /* Some floating point comparisons may trap. */ 2311 /* ??? There is no machine independent way to check for tests that trap 2312 when COMPARE is used, though many targets do make this distinction. 2313 For instance, sparc uses CCFPE for compares which generate exceptions 2314 and CCFP for compares which do not generate exceptions. */ 2315 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) 2316 return 1; 2317 /* But often the compare has some CC mode, so check operand 2318 modes as well. */ 2319 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT 2320 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT) 2321 return 1; 2322 break; 2323 2324 case NEG: 2325 case ABS: 2326 /* These operations don't trap even with floating point. */ 2327 break; 2328 2329 default: 2330 /* Any floating arithmetic may trap. */ 2331 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) 2332 return 1; 2333 } 2334 2335 fmt = GET_RTX_FORMAT (code); 2336 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 2337 { 2338 if (fmt[i] == 'e') 2339 { 2340 if (may_trap_p (XEXP (x, i))) 2341 return 1; 2342 } 2343 else if (fmt[i] == 'E') 2344 { 2345 int j; 2346 for (j = 0; j < XVECLEN (x, i); j++) 2347 if (may_trap_p (XVECEXP (x, i, j))) 2348 return 1; 2349 } 2350 } 2351 return 0; 2352} 2353 2354/* Return nonzero if X contains a comparison that is not either EQ or NE, 2355 i.e., an inequality. 
*/ 2356 2357int 2358inequality_comparisons_p (x) 2359 rtx x; 2360{ 2361 const char *fmt; 2362 int len, i; 2363 enum rtx_code code = GET_CODE (x); 2364 2365 switch (code) 2366 { 2367 case REG: 2368 case SCRATCH: 2369 case PC: 2370 case CC0: 2371 case CONST_INT: 2372 case CONST_DOUBLE: 2373 case CONST_VECTOR: 2374 case CONST: 2375 case LABEL_REF: 2376 case SYMBOL_REF: 2377 return 0; 2378 2379 case LT: 2380 case LTU: 2381 case GT: 2382 case GTU: 2383 case LE: 2384 case LEU: 2385 case GE: 2386 case GEU: 2387 return 1; 2388 2389 default: 2390 break; 2391 } 2392 2393 len = GET_RTX_LENGTH (code); 2394 fmt = GET_RTX_FORMAT (code); 2395 2396 for (i = 0; i < len; i++) 2397 { 2398 if (fmt[i] == 'e') 2399 { 2400 if (inequality_comparisons_p (XEXP (x, i))) 2401 return 1; 2402 } 2403 else if (fmt[i] == 'E') 2404 { 2405 int j; 2406 for (j = XVECLEN (x, i) - 1; j >= 0; j--) 2407 if (inequality_comparisons_p (XVECEXP (x, i, j))) 2408 return 1; 2409 } 2410 } 2411 2412 return 0; 2413} 2414 2415/* Replace any occurrence of FROM in X with TO. The function does 2416 not enter into CONST_DOUBLE for the replace. 2417 2418 Note that copying is not done so X must not be shared unless all copies 2419 are to be modified. */ 2420 2421rtx 2422replace_rtx (x, from, to) 2423 rtx x, from, to; 2424{ 2425 int i, j; 2426 const char *fmt; 2427 2428 /* The following prevents loops occurrence when we change MEM in 2429 CONST_DOUBLE onto the same CONST_DOUBLE. */ 2430 if (x != 0 && GET_CODE (x) == CONST_DOUBLE) 2431 return x; 2432 2433 if (x == from) 2434 return to; 2435 2436 /* Allow this function to make replacements in EXPR_LISTs. */ 2437 if (x == 0) 2438 return 0; 2439 2440 if (GET_CODE (x) == SUBREG) 2441 { 2442 rtx new = replace_rtx (SUBREG_REG (x), from, to); 2443 2444 if (GET_CODE (new) == CONST_INT) 2445 { 2446 x = simplify_subreg (GET_MODE (x), new, 2447 GET_MODE (SUBREG_REG (x)), 2448 SUBREG_BYTE (x)); 2449 if (! 
x)
	      abort ();
	  }
	else
	  SUBREG_REG (x) = new;

	return x;
      }
    else if (GET_CODE (x) == ZERO_EXTEND)
      {
	rtx new = replace_rtx (XEXP (x, 0), from, to);

	/* If the replacement folded to a constant, re-simplify the
	   extension now: a (zero_extend (const_int)) is not valid rtl.  */
	if (GET_CODE (new) == CONST_INT)
	  {
	    x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					  new, GET_MODE (XEXP (x, 0)));
	    if (! x)
	      abort ();
	  }
	else
	  XEXP (x, 0) = new;

	return x;
      }

  /* Generic case: recurse into every operand of X, rewriting in place.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}

/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */

rtx
replace_regs (x, reg_map, nregs, replace_dest)
     rtx x;
     rtx *reg_map;
     unsigned int nregs;
     int replace_dest;
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* These codes contain no registers; nothing to replace.  */
      return x;

    case REG:
      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
	{
	  /* SUBREGs can't be shared.  Always return a copy to ensure that if
	     this replacement occurs more than once then each instance will
	     get distinct rtx.  */
	  if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
	    return copy_rtx (reg_map[REGNO (x)]);
	  return reg_map[REGNO (x)];
	}
      return x;

    case SUBREG:
      /* Prevent making nested SUBREGs: if the mapped value is itself a
	 SUBREG, fold the two into one.  */
      if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
	  && reg_map[REGNO (SUBREG_REG (x))] != 0
	  && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
	{
	  rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
	  return simplify_gen_subreg (GET_MODE (x), map_val,
				      GET_MODE (SUBREG_REG (x)),
				      SUBREG_BYTE (x));
	}
      break;

    case SET:
      if (replace_dest)
	SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);

      else if (GET_CODE (SET_DEST (x)) == MEM
	       || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	/* Even if we are not to replace destinations, replace register if it
	   is CONTAINED in destination (destination is memory or
	   STRICT_LOW_PART).  */
	XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
					       reg_map, nregs, 0);
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	/* Similarly, for ZERO_EXTRACT we replace all operands; breaking
	   out of the switch reaches the generic traversal below.  */
	break;

      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
      return x;

    default:
      break;
    }

  /* Generic traversal: recurse into every operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
					      nregs, replace_dest);
	}
    }
  return x;
}

/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

static int
computed_jump_p_1 (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case LABEL_REF:
    case PC:
      return 0;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case REG:
      return 1;

    case MEM:
      /* A load counts as "computed" unless it comes from the constant
	 pool.  */
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

    case IF_THEN_ELSE:
      /* Deliberately skip the condition (operand 0); only the two arms
	 can become the jump target.  */
      return (computed_jump_p_1 (XEXP (x, 1))
	      || computed_jump_p_1 (XEXP (x, 2)));

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
	  && computed_jump_p_1 (XEXP (x, i)))
	return 1;

      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
	    return 1;
    }

  return 0;
}

/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).
*/

int
computed_jump_p (insn)
     rtx insn;
{
  int i;
  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx pat = PATTERN (insn);

      /* A jump carrying a REG_LABEL note targets a known label and is
	 therefore not a computed jump.  */
      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
	return 0;
      else if (GET_CODE (pat) == PARALLEL)
	{
	  int len = XVECLEN (pat, 0);
	  int has_use_labelref = 0;

	  /* A (use (label_ref ...)) inside the PARALLEL marks a
	     tablejump or casesi insn, which we do not count.  */
	  for (i = len - 1; i >= 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
		    == LABEL_REF))
	      has_use_labelref = 1;

	  if (! has_use_labelref)
	    for (i = len - 1; i >= 0; i--)
	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
		return 1;
	}
      else if (GET_CODE (pat) == SET
	       && SET_DEST (pat) == pc_rtx
	       && computed_jump_p_1 (SET_SRC (pat)))
	return 1;
    }
  return 0;
}

/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   non-zero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

int
for_each_rtx (x, f, data)
     rtx *x;
     rtx_function f;
     void *data;
{
  int result;
  int length;
  const char *format;
  int i;

  /* Call F on X.  */
  result = (*f) (x, data);
  if (result == -1)
    /* Do not traverse sub-expressions.  */
    return 0;
  else if (result != 0)
    /* Stop the traversal.  */
    return result;

  if (*x == NULL_RTX)
    /* There are no sub-expressions.  */
    return 0;

  length = GET_RTX_LENGTH (GET_CODE (*x));
  format = GET_RTX_FORMAT (GET_CODE (*x));

  for (i = 0; i < length; ++i)
    {
      switch (format[i])
	{
	case 'e':
	  result = for_each_rtx (&XEXP (*x, i), f, data);
	  if (result != 0)
	    return result;
	  break;

	case 'V':
	case 'E':
	  /* 'V' is an optional vector; it may be absent.  */
	  if (XVEC (*x, i) != 0)
	    {
	      int j;
	      for (j = 0; j < XVECLEN (*x, i); ++j)
		{
		  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
		  if (result != 0)
		    return result;
		}
	    }
	  break;

	default:
	  /* Nothing to do.  */
	  break;
	}

    }

  return 0;
}

/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

rtx
regno_use_in (regno, x)
     unsigned int regno;
     rtx x;
{
  const char *fmt;
  int i, j;
  rtx tem;

  if (GET_CODE (x) == REG && REGNO (x) == regno)
    return x;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if ((tem = regno_use_in (regno, XEXP (x, i))))
	    return tem;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
	    return tem;
    }

  return NULL_RTX;
}

/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for being the second
   operand and positive values for the first operand.
*/ 2798 2799int 2800commutative_operand_precedence (op) 2801 rtx op; 2802{ 2803 /* Constants always come the second operand. Prefer "nice" constants. */ 2804 if (GET_CODE (op) == CONST_INT) 2805 return -5; 2806 if (GET_CODE (op) == CONST_DOUBLE) 2807 return -4; 2808 if (CONSTANT_P (op)) 2809 return -3; 2810 2811 /* SUBREGs of objects should come second. */ 2812 if (GET_CODE (op) == SUBREG 2813 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o') 2814 return -2; 2815 2816 /* If only one operand is a `neg', `not', 2817 `mult', `plus', or `minus' expression, it will be the first 2818 operand. */ 2819 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT 2820 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS 2821 || GET_CODE (op) == MINUS) 2822 return 2; 2823 2824 /* Complex expressions should be the first, so decrease priority 2825 of objects. */ 2826 if (GET_RTX_CLASS (GET_CODE (op)) == 'o') 2827 return -1; 2828 return 0; 2829} 2830 2831/* Return 1 iff it is necessary to swap operands of commutative operation 2832 in order to canonicalize expression. */ 2833 2834int 2835swap_commutative_operands_p (x, y) 2836 rtx x, y; 2837{ 2838 return (commutative_operand_precedence (x) 2839 < commutative_operand_precedence (y)); 2840} 2841 2842/* Return 1 if X is an autoincrement side effect and the register is 2843 not the stack pointer. */ 2844int 2845auto_inc_p (x) 2846 rtx x; 2847{ 2848 switch (GET_CODE (x)) 2849 { 2850 case PRE_INC: 2851 case POST_INC: 2852 case PRE_DEC: 2853 case POST_DEC: 2854 case PRE_MODIFY: 2855 case POST_MODIFY: 2856 /* There are no REG_INC notes for SP. */ 2857 if (XEXP (x, 0) != stack_pointer_rtx) 2858 return 1; 2859 default: 2860 break; 2861 } 2862 return 0; 2863} 2864 2865/* Return 1 if the sequence of instructions beginning with FROM and up 2866 to and including TO is safe to move. 
If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */

int
insns_safe_to_move_p (from, to, new_to)
     rtx from;
     rtx to;
     rtx *new_to;
{
  int eh_region_count = 0;
  int past_to_p = 0;
  rtx r = from;

  /* By default, assume the end of the region will be what was
     suggested.  */
  if (new_to)
    *new_to = to;

  while (r)
    {
      if (GET_CODE (r) == NOTE)
	{
	  switch (NOTE_LINE_NUMBER (r))
	    {
	    case NOTE_INSN_EH_REGION_BEG:
	      ++eh_region_count;
	      break;

	    case NOTE_INSN_EH_REGION_END:
	      if (eh_region_count == 0)
		/* This sequence of instructions contains the end of
		   an exception region, but not the beginning.  Moving
		   it will cause chaos.  */
		return 0;

	      --eh_region_count;
	      break;

	    default:
	      break;
	    }
	}
      else if (past_to_p)
	/* If we've passed TO, and we see a non-note instruction, we
	   can't extend the sequence to a movable sequence.  */
	return 0;

      if (r == to)
	{
	  if (!new_to)
	    /* It's OK to move the sequence if there were matched sets of
	       exception region notes.  */
	    return eh_region_count == 0;

	  past_to_p = 1;
	}

      /* It's OK to move the sequence if there were matched sets of
	 exception region notes.  */
      if (past_to_p && eh_region_count == 0)
	{
	  *new_to = r;
	  return 1;
	}

      /* Go to the next instruction.  */
      r = NEXT_INSN (r);
    }

  return 0;
}

/* Return non-zero if IN contains a piece of rtl whose address is LOC.  */
int
loc_mentioned_in_p (loc, in)
     rtx *loc, in;
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      /* Compare the address of each operand slot directly; this finds
	 LOC itself without dereferencing it.  */
      if (loc == &in->fld[i].rtx)
	return 1;
      if (fmt[i] == 'e')
	{
	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
	    return 1;
    }
  return 0;
}

/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (x)
     rtx x;
{
  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  enum machine_mode mode = GET_MODE (x);
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    if ((SUBREG_BYTE (x) % UNITS_PER_WORD
	 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
	&& (SUBREG_BYTE (x) % UNITS_PER_WORD
	    || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
      abort ();

  /* Convert the byte offset into a word number counted from the least
     significant end of the inner register.  */
  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
  else
    word = SUBREG_BYTE (x) / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  /* Likewise for the byte within that word.  */
  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
  else
    byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}

/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (xregno, xmode, offset, ymode)
     unsigned int xregno;
     enum machine_mode xmode;
     unsigned int offset;
     enum machine_mode ymode;
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  /* Only hard registers have a meaningful HARD_REGNO_NREGS.  */
  if (xregno >= FIRST_PSEUDO_REGISTER)
    abort ();

  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
    abort ();

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}

/* Return the final regno that a subreg expression refers to.
*/
unsigned int
subreg_regno (x)
     rtx x;
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
				     GET_MODE (subreg),
				     SUBREG_BYTE (x),
				     GET_MODE (x));
  return ret;

}

/* Data shared between parms_set and its caller
   find_first_parameter_load.  */
struct parms_set_data
{
  int nregs;			/* Number of parameter registers not yet seen.  */
  HARD_REG_SET regs;		/* The parameter registers not yet seen.  */
};

/* Helper function for noticing stores to parameter registers.  */
static void
parms_set (x, pat, data)
     rtx x, pat ATTRIBUTE_UNUSED;
     void *data;
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      /* One fewer parameter register remains to be found.  */
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}

/* Look backward for first parameter to be loaded.
   Do not skip BOUNDARY.  */
rtx
find_first_parameter_load (call_insn, boundary)
     rtx call_insn, boundary;
{
  struct parms_set_data parm;
  rtx p, before;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
      {
	if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
	  abort ();

	/* We only care about registers which can hold function
	   arguments.  */
	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
	  continue;

	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
	parm.nregs++;
      }
  before = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
	 another.  Stop in that case.  */
      if (GET_CODE (before) == CALL_INSN)
	break;

      /* Our caller must either ensure that we will find all sets
	 (in case code has not been optimized yet), or guard against
	 possible labels by setting BOUNDARY to the preceding
	 CODE_LABEL.  */
      if (GET_CODE (before) == CODE_LABEL)
	{
	  if (before != boundary)
	    abort ();
	  break;
	}

      if (INSN_P (before))
	note_stores (PATTERN (before), parms_set, &parm);
    }
  return before;
}