/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
60 61/* Commonly used modes. */ 62 63enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */ 64enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */ 65enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */ 66enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */ 67 68 69/* This is *not* reset after each function. It gives each CODE_LABEL 70 in the entire compilation a unique label number. */ 71 72static GTY(()) int label_num = 1; 73
|
74/* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
77
78static int last_label_num;
79
80/* Value label_num had when set_new_last_label_num was called.
81 If label_num has not changed since then, last_label_num is valid. */
82
83static int base_label_num;
84
|
74/* Nonzero means do not generate NOTEs for source line numbers. */ 75 76static int no_line_numbers; 77 78/* Commonly used rtx's, so that we only need space for one copy. 79 These are initialized once for the entire compilation. 80 All of these are unique; no other rtx-object will be equal to any 81 of these. */ 82 83rtx global_rtl[GR_MAX]; 84 85/* Commonly used RTL for hard registers. These objects are not necessarily 86 unique, so we allocate them separately from global_rtl. They are 87 initialized once per compilation unit, then copied into regno_reg_rtx 88 at the beginning of each function. */ 89static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER]; 90 91/* We record floating-point CONST_DOUBLEs in each floating-point mode for 92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we 93 record a copy of const[012]_rtx. */ 94 95rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE]; 96 97rtx const_true_rtx; 98 99REAL_VALUE_TYPE dconst0; 100REAL_VALUE_TYPE dconst1; 101REAL_VALUE_TYPE dconst2; 102REAL_VALUE_TYPE dconst3; 103REAL_VALUE_TYPE dconst10; 104REAL_VALUE_TYPE dconstm1; 105REAL_VALUE_TYPE dconstm2; 106REAL_VALUE_TYPE dconsthalf; 107REAL_VALUE_TYPE dconstthird; 108REAL_VALUE_TYPE dconstpi; 109REAL_VALUE_TYPE dconste; 110 111/* All references to the following fixed hard registers go through 112 these unique rtl objects. On machines where the frame-pointer and 113 arg-pointer are the same register, they use the same unique object. 114 115 After register allocation, other rtl objects which used to be pseudo-regs 116 may be clobbered to refer to the frame-pointer register. 117 But references that were originally to the frame-pointer can be 118 distinguished from the others because they contain frame_pointer_rtx. 119 120 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little 121 tricky: until register elimination has taken place hard_frame_pointer_rtx 122 should be used if it is being set, and frame_pointer_rtx otherwise. 
After 123 register elimination hard_frame_pointer_rtx should always be used. 124 On machines where the two registers are same (most) then these are the 125 same. 126 127 In an inline procedure, the stack and frame pointer rtxs may not be 128 used for anything else. */ 129rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */ 130rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */ 131rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */ 132 133/* This is used to implement __builtin_return_address for some machines. 134 See for instance the MIPS port. */ 135rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */ 136 137/* We make one copy of (const_int C) where C is in 138 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] 139 to save space during the compilation and simplify comparisons of 140 integers. */ 141 142rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1]; 143 144/* A hash table storing CONST_INTs whose absolute value is greater 145 than MAX_SAVED_CONST_INT. */ 146 147static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def))) 148 htab_t const_int_htab; 149 150/* A hash table storing memory attribute structures. */ 151static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs))) 152 htab_t mem_attrs_htab; 153 154/* A hash table storing register attribute structures. */ 155static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs))) 156 htab_t reg_attrs_htab; 157 158/* A hash table storing all CONST_DOUBLEs. */ 159static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def))) 160 htab_t const_double_htab; 161 162#define first_insn (cfun->emit->x_first_insn) 163#define last_insn (cfun->emit->x_last_insn) 164#define cur_insn_uid (cfun->emit->x_cur_insn_uid) 165#define last_location (cfun->emit->x_last_location) 166#define first_label_num (cfun->emit->x_first_label_num) 167
|
179static rtx make_jump_insn_raw (rtx);
|
168static rtx make_call_insn_raw (rtx); 169static rtx find_line_note (rtx); 170static rtx change_address_1 (rtx, enum machine_mode, rtx, int); 171static void unshare_all_decls (tree); 172static void reset_used_decls (tree); 173static void mark_label_nuses (rtx); 174static hashval_t const_int_htab_hash (const void *); 175static int const_int_htab_eq (const void *, const void *); 176static hashval_t const_double_htab_hash (const void *); 177static int const_double_htab_eq (const void *, const void *); 178static rtx lookup_const_double (rtx); 179static hashval_t mem_attrs_htab_hash (const void *); 180static int mem_attrs_htab_eq (const void *, const void *); 181static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int, 182 enum machine_mode); 183static hashval_t reg_attrs_htab_hash (const void *); 184static int reg_attrs_htab_eq (const void *, const void *); 185static reg_attrs *get_reg_attrs (tree, int); 186static tree component_ref_for_mem_expr (tree);
|
199static rtx gen_const_vector_0 (enum machine_mode);
200static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
|
187static rtx gen_const_vector (enum machine_mode, int); |
188static void copy_rtx_if_shared_1 (rtx *orig); 189 190/* Probability of the conditional branch currently proceeded by try_split. 191 Set to -1 otherwise. */ 192int split_branch_probability = -1; 193 194/* Returns a hash code for X (which is a really a CONST_INT). */ 195 196static hashval_t 197const_int_htab_hash (const void *x) 198{ 199 return (hashval_t) INTVAL ((rtx) x); 200} 201 202/* Returns nonzero if the value represented by X (which is really a 203 CONST_INT) is the same as that given by Y (which is really a 204 HOST_WIDE_INT *). */ 205 206static int 207const_int_htab_eq (const void *x, const void *y) 208{ 209 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y)); 210} 211 212/* Returns a hash code for X (which is really a CONST_DOUBLE). */ 213static hashval_t 214const_double_htab_hash (const void *x) 215{ 216 rtx value = (rtx) x; 217 hashval_t h; 218 219 if (GET_MODE (value) == VOIDmode) 220 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value); 221 else 222 { 223 h = real_hash (CONST_DOUBLE_REAL_VALUE (value)); 224 /* MODE is used in the comparison, so it should be in the hash. */ 225 h ^= GET_MODE (value); 226 } 227 return h; 228} 229 230/* Returns nonzero if the value represented by X (really a ...) 231 is the same as that represented by Y (really a ...) */ 232static int 233const_double_htab_eq (const void *x, const void *y) 234{ 235 rtx a = (rtx)x, b = (rtx)y; 236 237 if (GET_MODE (a) != GET_MODE (b)) 238 return 0; 239 if (GET_MODE (a) == VOIDmode) 240 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b) 241 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b)); 242 else 243 return real_identical (CONST_DOUBLE_REAL_VALUE (a), 244 CONST_DOUBLE_REAL_VALUE (b)); 245} 246 247/* Returns a hash code for X (which is a really a mem_attrs *). */ 248 249static hashval_t 250mem_attrs_htab_hash (const void *x) 251{ 252 mem_attrs *p = (mem_attrs *) x; 253 254 return (p->alias ^ (p->align * 1000) 255 ^ ((p->offset ? 
INTVAL (p->offset) : 0) * 50000) 256 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
|
270 ^ (size_t) p->expr);
|
257 ^ (size_t) iterative_hash_expr (p->expr, 0)); |
258} 259 260/* Returns nonzero if the value represented by X (which is really a 261 mem_attrs *) is the same as that given by Y (which is also really a 262 mem_attrs *). */ 263 264static int 265mem_attrs_htab_eq (const void *x, const void *y) 266{ 267 mem_attrs *p = (mem_attrs *) x; 268 mem_attrs *q = (mem_attrs *) y; 269
|
283 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
284 && p->size == q->size && p->align == q->align);
|
270 return (p->alias == q->alias && p->offset == q->offset 271 && p->size == q->size && p->align == q->align 272 && (p->expr == q->expr 273 || (p->expr != NULL_TREE && q->expr != NULL_TREE 274 && operand_equal_p (p->expr, q->expr, 0)))); |
275} 276 277/* Allocate a new mem_attrs structure and insert it into the hash table if 278 one identical to it is not already in the table. We are doing this for 279 MEM of mode MODE. */ 280 281static mem_attrs * 282get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size, 283 unsigned int align, enum machine_mode mode) 284{ 285 mem_attrs attrs; 286 void **slot; 287 288 /* If everything is the default, we can just return zero. 289 This must match what the corresponding MEM_* macros return when the 290 field is not present. */ 291 if (alias == 0 && expr == 0 && offset == 0 292 && (size == 0 293 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size))) 294 && (STRICT_ALIGNMENT && mode != BLKmode 295 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT)) 296 return 0; 297 298 attrs.alias = alias; 299 attrs.expr = expr; 300 attrs.offset = offset; 301 attrs.size = size; 302 attrs.align = align; 303 304 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT); 305 if (*slot == 0) 306 { 307 *slot = ggc_alloc (sizeof (mem_attrs)); 308 memcpy (*slot, &attrs, sizeof (mem_attrs)); 309 } 310 311 return *slot; 312} 313 314/* Returns a hash code for X (which is a really a reg_attrs *). */ 315 316static hashval_t 317reg_attrs_htab_hash (const void *x) 318{ 319 reg_attrs *p = (reg_attrs *) x; 320 321 return ((p->offset * 1000) ^ (long) p->decl); 322} 323 324/* Returns nonzero if the value represented by X (which is really a 325 reg_attrs *) is the same as that given by Y (which is also really a 326 reg_attrs *). */ 327 328static int 329reg_attrs_htab_eq (const void *x, const void *y) 330{ 331 reg_attrs *p = (reg_attrs *) x; 332 reg_attrs *q = (reg_attrs *) y; 333 334 return (p->decl == q->decl && p->offset == q->offset); 335} 336/* Allocate a new reg_attrs structure and insert it into the hash table if 337 one identical to it is not already in the table. We are doing this for 338 MEM of mode MODE. 
*/ 339 340static reg_attrs * 341get_reg_attrs (tree decl, int offset) 342{ 343 reg_attrs attrs; 344 void **slot; 345 346 /* If everything is the default, we can just return zero. */ 347 if (decl == 0 && offset == 0) 348 return 0; 349 350 attrs.decl = decl; 351 attrs.offset = offset; 352 353 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT); 354 if (*slot == 0) 355 { 356 *slot = ggc_alloc (sizeof (reg_attrs)); 357 memcpy (*slot, &attrs, sizeof (reg_attrs)); 358 } 359 360 return *slot; 361} 362 363/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and 364 don't attempt to share with the various global pieces of rtl (such as 365 frame_pointer_rtx). */ 366 367rtx 368gen_raw_REG (enum machine_mode mode, int regno) 369{ 370 rtx x = gen_rtx_raw_REG (mode, regno); 371 ORIGINAL_REGNO (x) = regno; 372 return x; 373} 374 375/* There are some RTL codes that require special attention; the generation 376 functions do the raw handling. If you add to this list, modify 377 special_rtx in gengenrtl.c as well. */ 378 379rtx 380gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg) 381{ 382 void **slot; 383 384 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT) 385 return const_int_rtx[arg + MAX_SAVED_CONST_INT]; 386 387#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1 388 if (const_true_rtx && arg == STORE_FLAG_VALUE) 389 return const_true_rtx; 390#endif 391 392 /* Look up the CONST_INT in the hash table. */ 393 slot = htab_find_slot_with_hash (const_int_htab, &arg, 394 (hashval_t) arg, INSERT); 395 if (*slot == 0) 396 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg); 397 398 return (rtx) *slot; 399} 400 401rtx 402gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode) 403{ 404 return GEN_INT (trunc_int_for_mode (c, mode)); 405} 406 407/* CONST_DOUBLEs might be created from pairs of integers, or from 408 REAL_VALUE_TYPEs. Also, their length is known only at run time, 409 so we cannot use gen_rtx_raw_CONST_DOUBLE. 
*/ 410 411/* Determine whether REAL, a CONST_DOUBLE, already exists in the 412 hash table. If so, return its counterpart; otherwise add it 413 to the hash table and return it. */ 414static rtx 415lookup_const_double (rtx real) 416{ 417 void **slot = htab_find_slot (const_double_htab, real, INSERT); 418 if (*slot == 0) 419 *slot = real; 420 421 return (rtx) *slot; 422} 423 424/* Return a CONST_DOUBLE rtx for a floating-point value specified by 425 VALUE in mode MODE. */ 426rtx 427const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode) 428{ 429 rtx real = rtx_alloc (CONST_DOUBLE); 430 PUT_MODE (real, mode); 431
|
442 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
|
432 real->u.rv = value; |
433 434 return lookup_const_double (real); 435} 436 437/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair 438 of ints: I0 is the low-order word and I1 is the high-order word. 439 Do not use this routine for non-integer modes; convert to 440 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */ 441 442rtx 443immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode) 444{ 445 rtx value; 446 unsigned int i; 447
|
448 /* There are the following cases (note that there are no modes with 449 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT): 450 451 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use 452 gen_int_mode. 453 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of 454 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only 455 from copies of the sign bit, and sign of i0 and i1 are the same), then 456 we return a CONST_INT for i0. 457 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */ |
458 if (mode != VOIDmode) 459 {
|
460 int width;
461 if (GET_MODE_CLASS (mode) != MODE_INT
462 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
463 /* We can get a 0 for an error mark. */
464 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
465 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
466 abort ();
|
460 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT 461 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT 462 /* We can get a 0 for an error mark. */ 463 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT 464 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT); |
465
|
468 /* We clear out all bits that don't belong in MODE, unless they and
469 our sign bit are all one. So we get either a reasonable negative
470 value or a reasonable unsigned value for this mode. */
471 width = GET_MODE_BITSIZE (mode);
472 if (width < HOST_BITS_PER_WIDE_INT
473 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
474 != ((HOST_WIDE_INT) (-1) << (width - 1))))
475 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
476 else if (width == HOST_BITS_PER_WIDE_INT
477 && ! (i1 == ~0 && i0 < 0))
478 i1 = 0;
479 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
480 /* We cannot represent this value as a constant. */
481 abort ();
|
466 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) 467 return gen_int_mode (i0, mode); |
468
|
483 /* If this would be an entire word for the target, but is not for
484 the host, then sign-extend on the host so that the number will
485 look the same way on the host that it would on the target.
486
487 For example, when building a 64 bit alpha hosted 32 bit sparc
488 targeted compiler, then we want the 32 bit unsigned value -1 to be
489 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
490 The latter confuses the sparc backend. */
491
492 if (width < HOST_BITS_PER_WIDE_INT
493 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
494 i0 |= ((HOST_WIDE_INT) (-1) << width);
495
496 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
497 CONST_INT.
498
499 ??? Strictly speaking, this is wrong if we create a CONST_INT for
500 a large unsigned constant with the size of MODE being
501 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
502 in a wider mode. In that case we will mis-interpret it as a
503 negative number.
504
505 Unfortunately, the only alternative is to make a CONST_DOUBLE for
506 any constant in any mode if it is an unsigned constant larger
507 than the maximum signed integer in an int on the host. However,
508 doing this will break everyone that always expects to see a
509 CONST_INT for SImode and smaller.
510
511 We have always been making CONST_INTs in this case, so nothing
512 new is being broken. */
513
514 if (width <= HOST_BITS_PER_WIDE_INT)
515 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
|
469 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT); |
470 } 471 472 /* If this integer fits in one word, return a CONST_INT. */ 473 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0)) 474 return GEN_INT (i0); 475 476 /* We use VOIDmode for integers. */ 477 value = rtx_alloc (CONST_DOUBLE); 478 PUT_MODE (value, VOIDmode); 479 480 CONST_DOUBLE_LOW (value) = i0; 481 CONST_DOUBLE_HIGH (value) = i1; 482 483 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++) 484 XWINT (value, i) = 0; 485 486 return lookup_const_double (value); 487} 488 489rtx 490gen_rtx_REG (enum machine_mode mode, unsigned int regno) 491{ 492 /* In case the MD file explicitly references the frame pointer, have 493 all such references point to the same frame pointer. This is 494 used during frame pointer elimination to distinguish the explicit 495 references to these registers from pseudos that happened to be 496 assigned to them. 497 498 If we have eliminated the frame pointer or arg pointer, we will 499 be using it as a normal register, for example as a spill 500 register. In such cases, we might be accessing it in a mode that 501 is not Pmode and therefore cannot use the pre-allocated rtx. 502 503 Also don't do this when we are making new REGs in reload, since 504 we don't want to get confused with the real pointers. 
*/ 505 506 if (mode == Pmode && !reload_in_progress) 507 { 508 if (regno == FRAME_POINTER_REGNUM 509 && (!reload_completed || frame_pointer_needed)) 510 return frame_pointer_rtx; 511#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 512 if (regno == HARD_FRAME_POINTER_REGNUM 513 && (!reload_completed || frame_pointer_needed)) 514 return hard_frame_pointer_rtx; 515#endif 516#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 517 if (regno == ARG_POINTER_REGNUM) 518 return arg_pointer_rtx; 519#endif 520#ifdef RETURN_ADDRESS_POINTER_REGNUM 521 if (regno == RETURN_ADDRESS_POINTER_REGNUM) 522 return return_address_pointer_rtx; 523#endif 524 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM 525 && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) 526 return pic_offset_table_rtx; 527 if (regno == STACK_POINTER_REGNUM) 528 return stack_pointer_rtx; 529 } 530 531#if 0 532 /* If the per-function register table has been set up, try to re-use 533 an existing entry in that table to avoid useless generation of RTL. 534 535 This code is disabled for now until we can fix the various backends 536 which depend on having non-shared hard registers in some cases. Long 537 term we want to re-enable this code as it can significantly cut down 538 on the amount of useless RTL that gets generated. 539 540 We'll also need to fix some code that runs after reload that wants to 541 set ORIGINAL_REGNO. */ 542 543 if (cfun 544 && cfun->emit 545 && regno_reg_rtx 546 && regno < FIRST_PSEUDO_REGISTER 547 && reg_raw_mode[regno] == mode) 548 return regno_reg_rtx[regno]; 549#endif 550 551 return gen_raw_REG (mode, regno); 552} 553 554rtx 555gen_rtx_MEM (enum machine_mode mode, rtx addr) 556{ 557 rtx rt = gen_rtx_raw_MEM (mode, addr); 558 559 /* This field is not cleared by the mere allocation of the rtx, so 560 we clear it here. */ 561 MEM_ATTRS (rt) = 0; 562 563 return rt; 564} 565
|
566/* Generate a memory referring to non-trapping constant memory. */ 567 |
568rtx
|
613gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
|
569gen_const_mem (enum machine_mode mode, rtx addr) |
570{
|
615 /* This is the most common failure type.
616 Catch it early so we can see who does it. */
617 if ((offset % GET_MODE_SIZE (mode)) != 0)
618 abort ();
619
620 /* This check isn't usable right now because combine will
621 throw arbitrary crap like a CALL into a SUBREG in
622 gen_lowpart_for_combine so we must just eat it. */
623#if 0
624 /* Check for this too. */
625 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
626 abort ();
627#endif
628 return gen_rtx_raw_SUBREG (mode, reg, offset);
|
571 rtx mem = gen_rtx_MEM (mode, addr); 572 MEM_READONLY_P (mem) = 1; 573 MEM_NOTRAP_P (mem) = 1; 574 return mem; |
575} 576
|
631/* Generate a SUBREG representing the least-significant part of REG if MODE
632 is smaller than mode of REG, otherwise paradoxical SUBREG. */
|
577/* Generate a MEM referring to fixed portions of the frame, e.g., register 578 save areas. */ |
579 580rtx
|
635gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
|
581gen_frame_mem (enum machine_mode mode, rtx addr) |
582{
|
637 enum machine_mode inmode;
638
639 inmode = GET_MODE (reg);
640 if (inmode == VOIDmode)
641 inmode = mode;
642 return gen_rtx_SUBREG (mode, reg,
643 subreg_lowpart_offset (mode, inmode));
|
583 rtx mem = gen_rtx_MEM (mode, addr); 584 MEM_NOTRAP_P (mem) = 1; 585 set_mem_alias_set (mem, get_frame_alias_set ()); 586 return mem; |
587}
|
645
646/* rtx gen_rtx (code, mode, [element1, ..., elementn])
647**
648** This routine generates an RTX of the size specified by
649** <code>, which is an RTX code. The RTX structure is initialized
650** from the arguments <element1> through <elementn>, which are
651** interpreted according to the specific RTX type's format. The
652** special machine mode associated with the rtx (if any) is specified
653** in <mode>.
654**
655** gen_rtx can be invoked in a way which resembles the lisp-like
656** rtx it will generate. For example, the following rtx structure:
657**
658** (plus:QI (mem:QI (reg:SI 1))
659** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
660**
661** ...would be generated by the following C code:
662**
663** gen_rtx (PLUS, QImode,
664** gen_rtx (MEM, QImode,
665** gen_rtx (REG, SImode, 1)),
666** gen_rtx (MEM, QImode,
667** gen_rtx (PLUS, SImode,
668** gen_rtx (REG, SImode, 2),
669** gen_rtx (REG, SImode, 3)))),
670*/
|
588
|
672/*VARARGS2*/
|
589/* Generate a MEM referring to a temporary use of the stack, not part 590 of the fixed stack frame. For example, something which is pushed 591 by a target splitter. */ |
592rtx
|
674gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
|
593gen_tmp_stack_mem (enum machine_mode mode, rtx addr) |
594{
|
676 int i; /* Array indices... */
677 const char *fmt; /* Current rtx's format... */
678 rtx rt_val; /* RTX to return to caller... */
679 va_list p;
|
595 rtx mem = gen_rtx_MEM (mode, addr); 596 MEM_NOTRAP_P (mem) = 1; 597 if (!current_function_calls_alloca) 598 set_mem_alias_set (mem, get_frame_alias_set ()); 599 return mem; 600} |
601
|
681 va_start (p, mode);
|
602/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if 603 this construct would be valid, and false otherwise. */ |
604
|
683 switch (code)
684 {
685 case CONST_INT:
686 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
687 break;
|
605bool 606validate_subreg (enum machine_mode omode, enum machine_mode imode, 607 rtx reg, unsigned int offset) 608{ 609 unsigned int isize = GET_MODE_SIZE (imode); 610 unsigned int osize = GET_MODE_SIZE (omode); |
611
|
689 case CONST_DOUBLE:
690 {
691 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
692 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
|
612 /* All subregs must be aligned. */ 613 if (offset % osize != 0) 614 return false; |
615
|
694 rt_val = immed_double_const (arg0, arg1, mode);
695 }
696 break;
|
616 /* The subreg offset cannot be outside the inner object. */ 617 if (offset >= isize) 618 return false; |
619
|
698 case REG:
699 rt_val = gen_rtx_REG (mode, va_arg (p, int));
700 break;
|
620 /* ??? This should not be here. Temporarily continue to allow word_mode 621 subregs of anything. The most common offender is (subreg:SI (reg:DF)). 622 Generally, backends are doing something sketchy but it'll take time to 623 fix them all. */ 624 if (omode == word_mode) 625 ; 626 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field 627 is the culprit here, and not the backends. */ 628 else if (osize >= UNITS_PER_WORD && isize >= osize) 629 ; 630 /* Allow component subregs of complex and vector. Though given the below 631 extraction rules, it's not always clear what that means. */ 632 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) 633 && GET_MODE_INNER (imode) == omode) 634 ; 635 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs, 636 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to 637 represent this. It's questionable if this ought to be represented at 638 all -- why can't this all be hidden in post-reload splitters that make 639 arbitrarily mode changes to the registers themselves. */ 640 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode) 641 ; 642 /* Subregs involving floating point modes are not allowed to 643 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but 644 (subreg:SI (reg:DF) 0) isn't. */ 645 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)) 646 { 647 if (isize != osize) 648 return false; 649 } |
650
|
702 case MEM:
703 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
704 break;
|
651 /* Paradoxical subregs must have offset zero. */ 652 if (osize > isize) 653 return offset == 0; |
654
|
706 default:
707 rt_val = rtx_alloc (code); /* Allocate the storage space. */
708 rt_val->mode = mode; /* Store the machine mode... */
|
655 /* This is a normal subreg. Verify that the offset is representable. */ |
656
|
710 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
711 for (i = 0; i < GET_RTX_LENGTH (code); i++)
712 {
713 switch (*fmt++)
714 {
715 case '0': /* Field with unknown use. Zero it. */
716 X0EXP (rt_val, i) = NULL_RTX;
717 break;
|
657 /* For hard registers, we already have most of these rules collected in 658 subreg_offset_representable_p. */ 659 if (reg && REG_P (reg) && HARD_REGISTER_P (reg)) 660 { 661 unsigned int regno = REGNO (reg); |
662
|
719 case 'i': /* An integer? */
720 XINT (rt_val, i) = va_arg (p, int);
721 break;
|
663#ifdef CANNOT_CHANGE_MODE_CLASS 664 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) 665 && GET_MODE_INNER (imode) == omode) 666 ; 667 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode)) 668 return false; 669#endif |
670
|
723 case 'w': /* A wide integer? */
724 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
725 break;
|
671 return subreg_offset_representable_p (regno, imode, offset, omode); 672 } |
673
|
727 case 's': /* A string? */
728 XSTR (rt_val, i) = va_arg (p, char *);
729 break;
|
674 /* For pseudo registers, we want most of the same checks. Namely: 675 If the register no larger than a word, the subreg must be lowpart. 676 If the register is larger than a word, the subreg must be the lowpart 677 of a subword. A subreg does *not* perform arbitrary bit extraction. 678 Given that we've already checked mode/offset alignment, we only have 679 to check subword subregs here. */ 680 if (osize < UNITS_PER_WORD) 681 { 682 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode; 683 unsigned int low_off = subreg_lowpart_offset (omode, wmode); 684 if (offset % UNITS_PER_WORD != low_off) 685 return false; 686 } 687 return true; 688} |
689
|
731 case 'e': /* An expression? */
732 case 'u': /* An insn? Same except when printing. */
733 XEXP (rt_val, i) = va_arg (p, rtx);
734 break;
|
690rtx 691gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset) 692{ 693 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset)); 694 return gen_rtx_raw_SUBREG (mode, reg, offset); 695} |
696
|
736 case 'E': /* An RTX vector? */
737 XVEC (rt_val, i) = va_arg (p, rtvec);
738 break;
|
697/* Generate a SUBREG representing the least-significant part of REG if MODE 698 is smaller than mode of REG, otherwise paradoxical SUBREG. */ |
699
|
740 case 'b': /* A bitmap? */
741 XBITMAP (rt_val, i) = va_arg (p, bitmap);
742 break;
|
700rtx 701gen_lowpart_SUBREG (enum machine_mode mode, rtx reg) 702{ 703 enum machine_mode inmode; |
704
|
744 case 't': /* A tree? */
745 XTREE (rt_val, i) = va_arg (p, tree);
746 break;
747
748 default:
749 abort ();
750 }
751 }
752 break;
753 }
754
755 va_end (p);
756 return rt_val;
|
705 inmode = GET_MODE (reg); 706 if (inmode == VOIDmode) 707 inmode = mode; 708 return gen_rtx_SUBREG (mode, reg, 709 subreg_lowpart_offset (mode, inmode)); |
710}
|
758
|
711 |
712/* gen_rtvec (n, [rt1, ..., rtn]) 713** 714** This routine creates an rtvec and stores within it the 715** pointers to rtx's which are its arguments. 716*/ 717 718/*VARARGS1*/ 719rtvec 720gen_rtvec (int n, ...) 721{ 722 int i, save_n; 723 rtx *vector; 724 va_list p; 725 726 va_start (p, n); 727 728 if (n == 0) 729 return NULL_RTVEC; /* Don't allocate an empty rtvec... */ 730 731 vector = alloca (n * sizeof (rtx)); 732 733 for (i = 0; i < n; i++) 734 vector[i] = va_arg (p, rtx); 735 736 /* The definition of VA_* in K&R C causes `n' to go out of scope. */ 737 save_n = n; 738 va_end (p); 739 740 return gen_rtvec_v (save_n, vector); 741} 742 743rtvec 744gen_rtvec_v (int n, rtx *argp) 745{ 746 int i; 747 rtvec rt_val; 748 749 if (n == 0) 750 return NULL_RTVEC; /* Don't allocate an empty rtvec... */ 751 752 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */ 753 754 for (i = 0; i < n; i++) 755 rt_val->elem[i] = *argp++; 756 757 return rt_val; 758} 759 760/* Generate a REG rtx for a new pseudo register of mode MODE. 761 This pseudo is assigned the next sequential register number. */ 762 763rtx 764gen_reg_rtx (enum machine_mode mode) 765{ 766 struct function *f = cfun; 767 rtx val; 768 769 /* Don't let anything called after initial flow analysis create new 770 registers. */
|
818 if (no_new_pseudos)
819 abort ();
|
771 gcc_assert (!no_new_pseudos); |
772 773 if (generating_concat_p 774 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT 775 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)) 776 { 777 /* For complex modes, don't make a single pseudo. 778 Instead, make a CONCAT of two pseudos. 779 This allows noncontiguous allocation of the real and imaginary parts, 780 which makes much better code. Besides, allocating DCmode 781 pseudos overstrains reload on some machines like the 386. */ 782 rtx realpart, imagpart; 783 enum machine_mode partmode = GET_MODE_INNER (mode); 784 785 realpart = gen_reg_rtx (partmode); 786 imagpart = gen_reg_rtx (partmode); 787 return gen_rtx_CONCAT (mode, realpart, imagpart); 788 } 789 790 /* Make sure regno_pointer_align, and regno_reg_rtx are large 791 enough to have an element for this pseudo reg number. */ 792 793 if (reg_rtx_no == f->emit->regno_pointer_align_length) 794 { 795 int old_size = f->emit->regno_pointer_align_length; 796 char *new; 797 rtx *new1; 798 799 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2); 800 memset (new + old_size, 0, old_size); 801 f->emit->regno_pointer_align = (unsigned char *) new; 802 803 new1 = ggc_realloc (f->emit->x_regno_reg_rtx, 804 old_size * 2 * sizeof (rtx)); 805 memset (new1 + old_size, 0, old_size * sizeof (rtx)); 806 regno_reg_rtx = new1; 807 808 f->emit->regno_pointer_align_length = old_size * 2; 809 } 810 811 val = gen_raw_REG (mode, reg_rtx_no); 812 regno_reg_rtx[reg_rtx_no++] = val; 813 return val; 814} 815
|
864/* Generate a register with same attributes as REG,
865 but offsetted by OFFSET. */
|
816/* Generate a register with same attributes as REG, but offsetted by OFFSET. 817 Do the big endian correction if needed. */ |
818 819rtx 820gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset) 821{ 822 rtx new = gen_rtx_REG (mode, regno);
|
823 tree decl; 824 HOST_WIDE_INT var_size; 825 826 /* PR middle-end/14084 827 The problem appears when a variable is stored in a larger register 828 and later it is used in the original mode or some mode in between 829 or some part of variable is accessed. 830 831 On little endian machines there is no problem because 832 the REG_OFFSET of the start of the variable is the same when 833 accessed in any mode (it is 0). 834 835 However, this is not true on big endian machines. 836 The offset of the start of the variable is different when accessed 837 in different modes. 838 When we are taking a part of the REG we have to change the OFFSET 839 from offset WRT size of mode of REG to offset WRT size of variable. 840 841 If we would not do the big endian correction the resulting REG_OFFSET 842 would be larger than the size of the DECL. 843 844 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine: 845 846 REG.mode MODE DECL size old offset new offset description 847 DI SI 4 4 0 int32 in SImode 848 DI SI 1 4 0 char in SImode 849 DI QI 1 7 0 char in QImode 850 DI QI 4 5 1 1st element in QImode 851 of char[4] 852 DI HI 4 6 2 1st element in HImode 853 of int16[2] 854 855 If the size of DECL is equal or greater than the size of REG 856 we can't do this correction because the register holds the 857 whole variable or a part of the variable and thus the REG_OFFSET 858 is already correct. */ 859 860 decl = REG_EXPR (reg); 861 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN) 862 && decl != NULL 863 && offset > 0 864 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode) 865 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0 866 && var_size < GET_MODE_SIZE (GET_MODE (reg)))) 867 { 868 int offset_le; 869 870 /* Convert machine endian to little endian WRT size of mode of REG. 
*/ 871 if (WORDS_BIG_ENDIAN) 872 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset) 873 / UNITS_PER_WORD) * UNITS_PER_WORD; 874 else 875 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD; 876 877 if (BYTES_BIG_ENDIAN) 878 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset) 879 % UNITS_PER_WORD); 880 else 881 offset_le += offset % UNITS_PER_WORD; 882 883 if (offset_le >= var_size) 884 { 885 /* MODE is wider than the variable so the new reg will cover 886 the whole variable so the resulting OFFSET should be 0. */ 887 offset = 0; 888 } 889 else 890 { 891 /* Convert little endian to machine endian WRT size of variable. */ 892 if (WORDS_BIG_ENDIAN) 893 offset = ((var_size - 1 - offset_le) 894 / UNITS_PER_WORD) * UNITS_PER_WORD; 895 else 896 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD; 897 898 if (BYTES_BIG_ENDIAN) 899 offset += ((var_size - 1 - offset_le) 900 % UNITS_PER_WORD); 901 else 902 offset += offset_le % UNITS_PER_WORD; 903 } 904 } 905 |
906 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg), 907 REG_OFFSET (reg) + offset); 908 return new; 909} 910 911/* Set the decl for MEM to DECL. */ 912 913void 914set_reg_attrs_from_mem (rtx reg, rtx mem) 915{ 916 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT) 917 REG_ATTRS (reg) 918 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem))); 919} 920 921/* Set the register attributes for registers contained in PARM_RTX. 922 Use needed values from memory attributes of MEM. */ 923 924void 925set_reg_attrs_for_parm (rtx parm_rtx, rtx mem) 926{
|
892 if (GET_CODE (parm_rtx) == REG)
|
927 if (REG_P (parm_rtx)) |
928 set_reg_attrs_from_mem (parm_rtx, mem); 929 else if (GET_CODE (parm_rtx) == PARALLEL) 930 { 931 /* Check for a NULL entry in the first slot, used to indicate that the 932 parameter goes both on the stack and in registers. */ 933 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1; 934 for (; i < XVECLEN (parm_rtx, 0); i++) 935 { 936 rtx x = XVECEXP (parm_rtx, 0, i);
|
902 if (GET_CODE (XEXP (x, 0)) == REG)
|
937 if (REG_P (XEXP (x, 0))) |
938 REG_ATTRS (XEXP (x, 0)) 939 = get_reg_attrs (MEM_EXPR (mem), 940 INTVAL (XEXP (x, 1))); 941 } 942 } 943} 944 945/* Assign the RTX X to declaration T. */ 946void 947set_decl_rtl (tree t, rtx x) 948{
|
914 DECL_CHECK (t)->decl.rtl = x;
|
949 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x; |
950 951 if (!x) 952 return; 953 /* For register, we maintain the reverse information too. */
|
919 if (GET_CODE (x) == REG)
|
954 if (REG_P (x)) |
955 REG_ATTRS (x) = get_reg_attrs (t, 0); 956 else if (GET_CODE (x) == SUBREG) 957 REG_ATTRS (SUBREG_REG (x)) 958 = get_reg_attrs (t, -SUBREG_BYTE (x)); 959 if (GET_CODE (x) == CONCAT) 960 { 961 if (REG_P (XEXP (x, 0))) 962 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0); 963 if (REG_P (XEXP (x, 1))) 964 REG_ATTRS (XEXP (x, 1)) 965 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0)))); 966 } 967 if (GET_CODE (x) == PARALLEL) 968 { 969 int i; 970 for (i = 0; i < XVECLEN (x, 0); i++) 971 { 972 rtx y = XVECEXP (x, 0, i); 973 if (REG_P (XEXP (y, 0))) 974 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1))); 975 } 976 } 977} 978
|
979/* Assign the RTX X to parameter declaration T. */ 980void 981set_decl_incoming_rtl (tree t, rtx x) 982{ 983 DECL_INCOMING_RTL (t) = x; 984 985 if (!x) 986 return; 987 /* For register, we maintain the reverse information too. */ 988 if (REG_P (x)) 989 REG_ATTRS (x) = get_reg_attrs (t, 0); 990 else if (GET_CODE (x) == SUBREG) 991 REG_ATTRS (SUBREG_REG (x)) 992 = get_reg_attrs (t, -SUBREG_BYTE (x)); 993 if (GET_CODE (x) == CONCAT) 994 { 995 if (REG_P (XEXP (x, 0))) 996 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0); 997 if (REG_P (XEXP (x, 1))) 998 REG_ATTRS (XEXP (x, 1)) 999 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0)))); 1000 } 1001 if (GET_CODE (x) == PARALLEL) 1002 { 1003 int i, start; 1004 1005 /* Check for a NULL entry, used to indicate that the parameter goes 1006 both on the stack and in registers. */ 1007 if (XEXP (XVECEXP (x, 0, 0), 0)) 1008 start = 0; 1009 else 1010 start = 1; 1011 1012 for (i = start; i < XVECLEN (x, 0); i++) 1013 { 1014 rtx y = XVECEXP (x, 0, i); 1015 if (REG_P (XEXP (y, 0))) 1016 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1))); 1017 } 1018 } 1019} 1020 |
1021/* Identify REG (which may be a CONCAT) as a user register. */ 1022 1023void 1024mark_user_reg (rtx reg) 1025{ 1026 if (GET_CODE (reg) == CONCAT) 1027 { 1028 REG_USERVAR_P (XEXP (reg, 0)) = 1; 1029 REG_USERVAR_P (XEXP (reg, 1)) = 1; 1030 }
|
954 else if (GET_CODE (reg) == REG)
955 REG_USERVAR_P (reg) = 1;
|
1031 else
|
957 abort ();
|
1032 { 1033 gcc_assert (REG_P (reg)); 1034 REG_USERVAR_P (reg) = 1; 1035 } |
1036} 1037 1038/* Identify REG as a probable pointer register and show its alignment 1039 as ALIGN, if nonzero. */ 1040 1041void 1042mark_reg_pointer (rtx reg, int align) 1043{ 1044 if (! REG_POINTER (reg)) 1045 { 1046 REG_POINTER (reg) = 1; 1047 1048 if (align) 1049 REGNO_POINTER_ALIGN (REGNO (reg)) = align; 1050 } 1051 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg))) 1052 /* We can no-longer be sure just how aligned this pointer is. */ 1053 REGNO_POINTER_ALIGN (REGNO (reg)) = align; 1054} 1055 1056/* Return 1 plus largest pseudo reg number used in the current function. */ 1057 1058int 1059max_reg_num (void) 1060{ 1061 return reg_rtx_no; 1062} 1063 1064/* Return 1 + the largest label number used so far in the current function. */ 1065 1066int 1067max_label_num (void) 1068{
|
991 if (last_label_num && label_num == base_label_num)
992 return last_label_num;
|
1069 return label_num; 1070} 1071 1072/* Return first label number used in this function (if any were used). */ 1073 1074int 1075get_first_label_num (void) 1076{ 1077 return first_label_num; 1078}
|
1003
1004/* Return the final regno of X, which is a SUBREG of a hard
1005 register. */
1006int
1007subreg_hard_regno (rtx x, int check_mode)
1008{
1009 enum machine_mode mode = GET_MODE (x);
1010 unsigned int byte_offset, base_regno, final_regno;
1011 rtx reg = SUBREG_REG (x);
|
1079
|
1013 /* This is where we attempt to catch illegal subregs
1014 created by the compiler. */
1015 if (GET_CODE (x) != SUBREG
1016 || GET_CODE (reg) != REG)
1017 abort ();
1018 base_regno = REGNO (reg);
1019 if (base_regno >= FIRST_PSEUDO_REGISTER)
1020 abort ();
1021 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
1022 abort ();
1023#ifdef ENABLE_CHECKING
1024 if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
1025 SUBREG_BYTE (x), mode))
1026 abort ();
1027#endif
1028 /* Catch non-congruent offsets too. */
1029 byte_offset = SUBREG_BYTE (x);
1030 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
1031 abort ();
|
1080/* If the rtx for label was created during the expansion of a nested 1081 function, then first_label_num won't include this label number. 1082 Fix this now so that array indicies work later. */ |
1083
|
1033 final_regno = subreg_regno (x);
1034
1035 return final_regno;
|
1084void 1085maybe_set_first_label_num (rtx x) 1086{ 1087 if (CODE_LABEL_NUMBER (x) < first_label_num) 1088 first_label_num = CODE_LABEL_NUMBER (x); |
1089}
|
1037
|
1090 |
1091/* Return a value representing some low-order bits of X, where the number 1092 of low-order bits is given by MODE. Note that no conversion is done 1093 between floating-point and fixed-point values, rather, the bit 1094 representation is returned. 1095 1096 This function handles the cases in common between gen_lowpart, below, 1097 and two variants in cse.c and combine.c. These are the cases that can 1098 be safely handled at all points in the compilation. 1099 1100 If this is not a case we can handle, return 0. */ 1101 1102rtx 1103gen_lowpart_common (enum machine_mode mode, rtx x) 1104{ 1105 int msize = GET_MODE_SIZE (mode); 1106 int xsize; 1107 int offset = 0; 1108 enum machine_mode innermode; 1109 1110 /* Unfortunately, this routine doesn't take a parameter for the mode of X, 1111 so we have to make one up. Yuk. */ 1112 innermode = GET_MODE (x);
|
1060 if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
|
1113 if (GET_CODE (x) == CONST_INT 1114 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT) |
1115 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0); 1116 else if (innermode == VOIDmode) 1117 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0); 1118 1119 xsize = GET_MODE_SIZE (innermode); 1120
|
1067 if (innermode == VOIDmode || innermode == BLKmode)
1068 abort ();
|
1121 gcc_assert (innermode != VOIDmode && innermode != BLKmode); |
1122 1123 if (innermode == mode) 1124 return x; 1125 1126 /* MODE must occupy no more words than the mode of X. */ 1127 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD 1128 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)) 1129 return 0; 1130 1131 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
|
1079 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
|
1132 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize) |
1133 return 0; 1134 1135 offset = subreg_lowpart_offset (mode, innermode); 1136 1137 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND) 1138 && (GET_MODE_CLASS (mode) == MODE_INT 1139 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)) 1140 { 1141 /* If we are getting the low-order part of something that has been 1142 sign- or zero-extended, we can either just use the object being 1143 extended or make a narrower extension. If we want an even smaller 1144 piece than the size of the object being extended, call ourselves 1145 recursively. 1146 1147 This case is used mostly by combine and cse. */ 1148 1149 if (GET_MODE (XEXP (x, 0)) == mode) 1150 return XEXP (x, 0); 1151 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))) 1152 return gen_lowpart_common (mode, XEXP (x, 0)); 1153 else if (msize < xsize) 1154 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0)); 1155 }
|
1103 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
|
1156 else if (GET_CODE (x) == SUBREG || REG_P (x) |
1157 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR 1158 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT) 1159 return simplify_gen_subreg (mode, x, innermode, offset); 1160 1161 /* Otherwise, we can't do this. */ 1162 return 0; 1163} 1164
|
1112/* Return the constant real or imaginary part (which has mode MODE)
1113 of a complex value X. The IMAGPART_P argument determines whether
1114 the real or complex component should be returned. This function
1115 returns NULL_RTX if the component isn't a constant. */
1116
1117static rtx
1118gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
1119{
1120 tree decl, part;
1121
1122 if (GET_CODE (x) == MEM
1123 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
1124 {
1125 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1126 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1127 {
1128 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1129 if (TREE_CODE (part) == REAL_CST
1130 || TREE_CODE (part) == INTEGER_CST)
1131 return expand_expr (part, NULL_RTX, mode, 0);
1132 }
1133 }
1134 return NULL_RTX;
1135}
1136
1137/* Return the real part (which has mode MODE) of a complex value X.
1138 This always comes at the low address in memory. */
1139
|
1165rtx
|
1141gen_realpart (enum machine_mode mode, rtx x)
1142{
1143 rtx part;
1144
1145 /* Handle complex constants. */
1146 part = gen_complex_constant_part (mode, x, 0);
1147 if (part != NULL_RTX)
1148 return part;
1149
1150 if (WORDS_BIG_ENDIAN
1151 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1152 && REG_P (x)
1153 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1154 internal_error
1155 ("can't access real part of complex value in hard register");
1156 else if (WORDS_BIG_ENDIAN)
1157 return gen_highpart (mode, x);
1158 else
1159 return gen_lowpart (mode, x);
1160}
1161
1162/* Return the imaginary part (which has mode MODE) of a complex value X.
1163 This always comes at the high address in memory. */
1164
1165rtx
1166gen_imagpart (enum machine_mode mode, rtx x)
1167{
1168 rtx part;
1169
1170 /* Handle complex constants. */
1171 part = gen_complex_constant_part (mode, x, 1);
1172 if (part != NULL_RTX)
1173 return part;
1174
1175 if (WORDS_BIG_ENDIAN)
1176 return gen_lowpart (mode, x);
1177 else if (! WORDS_BIG_ENDIAN
1178 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1179 && REG_P (x)
1180 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1181 internal_error
1182 ("can't access imaginary part of complex value in hard register");
1183 else
1184 return gen_highpart (mode, x);
1185}
1186
1187/* Return 1 iff X, assumed to be a SUBREG,
1188 refers to the real part of the complex value in its containing reg.
1189 Complex values are always stored with the real part in the first word,
1190 regardless of WORDS_BIG_ENDIAN. */
1191
1192int
1193subreg_realpart_p (rtx x)
1194{
1195 if (GET_CODE (x) != SUBREG)
1196 abort ();
1197
1198 return ((unsigned int) SUBREG_BYTE (x)
1199 < (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1200}
1201
1202/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1203 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1204 least-significant part of X.
1205 MODE specifies how big a part of X to return;
1206 it usually should not be larger than a word.
1207 If X is a MEM whose address is a QUEUED, the value may be so also. */
1208
1209rtx
1210gen_lowpart (enum machine_mode mode, rtx x)
1211{
1212 rtx result = gen_lowpart_common (mode, x);
1213
1214 if (result)
1215 return result;
1216 else if (GET_CODE (x) == REG)
1217 {
1218 /* Must be a hard reg that's not valid in MODE. */
1219 result = gen_lowpart_common (mode, copy_to_reg (x));
1220 if (result == 0)
1221 abort ();
1222 return result;
1223 }
1224 else if (GET_CODE (x) == MEM)
1225 {
1226 /* The only additional case we can do is MEM. */
1227 int offset = 0;
1228
1229 /* The following exposes the use of "x" to CSE. */
1230 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
1231 && SCALAR_INT_MODE_P (GET_MODE (x))
1232 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1233 GET_MODE_BITSIZE (GET_MODE (x)))
1234 && ! no_new_pseudos)
1235 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1236
1237 if (WORDS_BIG_ENDIAN)
1238 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1239 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1240
1241 if (BYTES_BIG_ENDIAN)
1242 /* Adjust the address so that the address-after-the-data
1243 is unchanged. */
1244 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1245 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1246
1247 return adjust_address (x, mode, offset);
1248 }
1249 else if (GET_CODE (x) == ADDRESSOF)
1250 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1251 else
1252 abort ();
1253}
1254
1255/* Like `gen_lowpart', but refer to the most significant part.
1256 This is used to access the imaginary part of a complex number. */
1257
1258rtx
|
1166gen_highpart (enum machine_mode mode, rtx x) 1167{ 1168 unsigned int msize = GET_MODE_SIZE (mode); 1169 rtx result; 1170 1171 /* This case loses if X is a subreg. To catch bugs early, 1172 complain if an invalid MODE is used even in other cases. */
|
1266 if (msize > UNITS_PER_WORD
1267 && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
1268 abort ();
|
1173 gcc_assert (msize <= UNITS_PER_WORD 1174 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x))); |
1175 1176 result = simplify_gen_subreg (mode, x, GET_MODE (x), 1177 subreg_highpart_offset (mode, GET_MODE (x)));
|
1272
|
1178 gcc_assert (result); 1179 |
1180 /* simplify_gen_subreg is not guaranteed to return a valid operand for 1181 the target if we have a MEM. gen_highpart must return a valid operand, 1182 emitting code if necessary to do so. */
|
1276 if (result != NULL_RTX && GET_CODE (result) == MEM)
1277 result = validize_mem (result);
1278
1279 if (!result)
1280 abort ();
|
1183 if (MEM_P (result)) 1184 { 1185 result = validize_mem (result); 1186 gcc_assert (result); 1187 } 1188 |
1189 return result; 1190} 1191 1192/* Like gen_highpart, but accept mode of EXP operand in case EXP can 1193 be VOIDmode constant. */ 1194rtx 1195gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp) 1196{ 1197 if (GET_MODE (exp) != VOIDmode) 1198 {
|
1291 if (GET_MODE (exp) != innermode)
1292 abort ();
|
1199 gcc_assert (GET_MODE (exp) == innermode); |
1200 return gen_highpart (outermode, exp); 1201 } 1202 return simplify_gen_subreg (outermode, exp, innermode, 1203 subreg_highpart_offset (outermode, innermode)); 1204} 1205 1206/* Return offset in bytes to get OUTERMODE low part 1207 of the value in mode INNERMODE stored in memory in target format. */ 1208 1209unsigned int 1210subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode) 1211{ 1212 unsigned int offset = 0; 1213 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)); 1214 1215 if (difference > 0) 1216 { 1217 if (WORDS_BIG_ENDIAN) 1218 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; 1219 if (BYTES_BIG_ENDIAN) 1220 offset += difference % UNITS_PER_WORD; 1221 } 1222 1223 return offset; 1224} 1225 1226/* Return offset in bytes to get OUTERMODE high part 1227 of the value in mode INNERMODE stored in memory in target format. */ 1228unsigned int 1229subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode) 1230{ 1231 unsigned int offset = 0; 1232 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)); 1233
|
1327 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1328 abort ();
|
1234 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode)); |
1235 1236 if (difference > 0) 1237 { 1238 if (! WORDS_BIG_ENDIAN) 1239 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; 1240 if (! BYTES_BIG_ENDIAN) 1241 offset += difference % UNITS_PER_WORD; 1242 } 1243 1244 return offset; 1245} 1246 1247/* Return 1 iff X, assumed to be a SUBREG, 1248 refers to the least significant part of its containing reg. 1249 If X is not a SUBREG, always return 1 (it is its own low part!). */ 1250 1251int 1252subreg_lowpart_p (rtx x) 1253{ 1254 if (GET_CODE (x) != SUBREG) 1255 return 1; 1256 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode) 1257 return 0; 1258 1259 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x))) 1260 == SUBREG_BYTE (x)); 1261} 1262 1263/* Return subword OFFSET of operand OP. 1264 The word number, OFFSET, is interpreted as the word number starting 1265 at the low-order address. OFFSET 0 is the low-order word if not 1266 WORDS_BIG_ENDIAN, otherwise it is the high-order word. 1267 1268 If we cannot extract the required word, we return zero. Otherwise, 1269 an rtx corresponding to the requested word will be returned. 1270 1271 VALIDATE_ADDRESS is nonzero if the address should be validated. Before 1272 reload has completed, a valid address will always be returned. After 1273 reload, if a valid address cannot be returned, we return zero. 1274 1275 If VALIDATE_ADDRESS is zero, we simply form the required address; validating 1276 it is the responsibility of the caller. 1277 1278 MODE is the mode of OP in case it is a CONST_INT. 1279 1280 ??? This is still rather broken for some cases. The problem for the 1281 moment is that all callers of this thing provide no 'goal mode' to 1282 tell us to work with. This exists because all callers were written 1283 in a word based SUBREG world. 1284 Now use of this function can be deprecated by simplify_subreg in most 1285 cases. 
1286 */ 1287 1288rtx 1289operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode) 1290{ 1291 if (mode == VOIDmode) 1292 mode = GET_MODE (op); 1293
|
1388 if (mode == VOIDmode)
1389 abort ();
|
1294 gcc_assert (mode != VOIDmode); |
1295 1296 /* If OP is narrower than a word, fail. */ 1297 if (mode != BLKmode 1298 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)) 1299 return 0; 1300 1301 /* If we want a word outside OP, return zero. */ 1302 if (mode != BLKmode 1303 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode)) 1304 return const0_rtx; 1305 1306 /* Form a new MEM at the requested address. */
|
1402 if (GET_CODE (op) == MEM)
|
1307 if (MEM_P (op)) |
1308 { 1309 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD); 1310 1311 if (! validate_address) 1312 return new; 1313 1314 else if (reload_completed) 1315 { 1316 if (! strict_memory_address_p (word_mode, XEXP (new, 0))) 1317 return 0; 1318 } 1319 else 1320 return replace_equiv_address (new, XEXP (new, 0)); 1321 } 1322 1323 /* Rest can be handled by simplify_subreg. */ 1324 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD)); 1325} 1326
|
1422/* Similar to `operand_subword', but never return 0. If we can't extract
1423 the required subword, put OP into a register and try again. If that fails,
1424 abort. We always validate the address in this case.
|
1327/* Similar to `operand_subword', but never return 0. If we can't 1328 extract the required subword, put OP into a register and try again. 1329 The second attempt must succeed. We always validate the address in 1330 this case. |
1331 1332 MODE is the mode of OP, in case it is CONST_INT. */ 1333 1334rtx 1335operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode) 1336{ 1337 rtx result = operand_subword (op, offset, 1, mode); 1338 1339 if (result) 1340 return result; 1341 1342 if (mode != BLKmode && mode != VOIDmode) 1343 { 1344 /* If this is a register which can not be accessed by words, copy it 1345 to a pseudo register. */
|
1440 if (GET_CODE (op) == REG)
|
1346 if (REG_P (op)) |
1347 op = copy_to_reg (op); 1348 else 1349 op = force_reg (mode, op); 1350 } 1351 1352 result = operand_subword (op, offset, 1, mode);
|
1447 if (result == 0)
1448 abort ();
|
1353 gcc_assert (result); |
1354 1355 return result; 1356} 1357
|
1453/* Given a compare instruction, swap the operands.
1454 A test instruction is changed into a compare of 0 against the operand. */
1455
1456void
1457reverse_comparison (rtx insn)
1458{
1459 rtx body = PATTERN (insn);
1460 rtx comp;
1461
1462 if (GET_CODE (body) == SET)
1463 comp = SET_SRC (body);
1464 else
1465 comp = SET_SRC (XVECEXP (body, 0, 0));
1466
1467 if (GET_CODE (comp) == COMPARE)
1468 {
1469 rtx op0 = XEXP (comp, 0);
1470 rtx op1 = XEXP (comp, 1);
1471 XEXP (comp, 0) = op1;
1472 XEXP (comp, 1) = op0;
1473 }
1474 else
1475 {
1476 rtx new = gen_rtx_COMPARE (VOIDmode,
1477 CONST0_RTX (GET_MODE (comp)), comp);
1478 if (GET_CODE (body) == SET)
1479 SET_SRC (body) = new;
1480 else
1481 SET_SRC (XVECEXP (body, 0, 0)) = new;
1482 }
1483}
1484
|
1358/* Within a MEM_EXPR, we care about either (1) a component ref of a decl, 1359 or (2) a component ref of something variable. Represent the later with 1360 a NULL expression. */ 1361 1362static tree 1363component_ref_for_mem_expr (tree ref) 1364{ 1365 tree inner = TREE_OPERAND (ref, 0); 1366 1367 if (TREE_CODE (inner) == COMPONENT_REF) 1368 inner = component_ref_for_mem_expr (inner); 1369 else 1370 {
|
1498 tree placeholder_ptr = 0;
1499
|
1371 /* Now remove any conversions: they don't change what the underlying
|
1501 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
|
1372 object is. Likewise for SAVE_EXPR. */ |
1373 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR 1374 || TREE_CODE (inner) == NON_LVALUE_EXPR 1375 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
|
1505 || TREE_CODE (inner) == SAVE_EXPR
1506 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1507 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1508 inner = find_placeholder (inner, &placeholder_ptr);
1509 else
1510 inner = TREE_OPERAND (inner, 0);
|
1376 || TREE_CODE (inner) == SAVE_EXPR) 1377 inner = TREE_OPERAND (inner, 0); |
1378 1379 if (! DECL_P (inner)) 1380 inner = NULL_TREE; 1381 } 1382 1383 if (inner == TREE_OPERAND (ref, 0)) 1384 return ref; 1385 else
|
1519 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1520 TREE_OPERAND (ref, 1));
|
1386 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner, 1387 TREE_OPERAND (ref, 1), NULL_TREE); |
1388} 1389 1390/* Returns 1 if both MEM_EXPR can be considered equal 1391 and 0 otherwise. */ 1392 1393int 1394mem_expr_equal_p (tree expr1, tree expr2) 1395{ 1396 if (expr1 == expr2) 1397 return 1; 1398 1399 if (! expr1 || ! expr2) 1400 return 0; 1401 1402 if (TREE_CODE (expr1) != TREE_CODE (expr2)) 1403 return 0; 1404 1405 if (TREE_CODE (expr1) == COMPONENT_REF) 1406 return 1407 mem_expr_equal_p (TREE_OPERAND (expr1, 0), 1408 TREE_OPERAND (expr2, 0)) 1409 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */ 1410 TREE_OPERAND (expr2, 1)); 1411
|
1545 if (TREE_CODE (expr1) == INDIRECT_REF)
|
1412 if (INDIRECT_REF_P (expr1)) |
1413 return mem_expr_equal_p (TREE_OPERAND (expr1, 0), 1414 TREE_OPERAND (expr2, 0));
|
1548
1549 /* Decls with different pointers can't be equal. */
1550 if (DECL_P (expr1))
1551 return 0;
|
1415
|
1553 abort(); /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
|
1416 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already |
1417 have been resolved here. */
|
1418 gcc_assert (DECL_P (expr1)); 1419 1420 /* Decls with different pointers can't be equal. */ 1421 return 0; |
1422} 1423 1424/* Given REF, a MEM, and T, either the type of X or the expression 1425 corresponding to REF, set the memory attributes. OBJECTP is nonzero 1426 if we are making a new object of this type. BITPOS is nonzero if 1427 there is an offset outstanding on T that will be applied later. */ 1428 1429void 1430set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, 1431 HOST_WIDE_INT bitpos) 1432{ 1433 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref); 1434 tree expr = MEM_EXPR (ref); 1435 rtx offset = MEM_OFFSET (ref); 1436 rtx size = MEM_SIZE (ref); 1437 unsigned int align = MEM_ALIGN (ref); 1438 HOST_WIDE_INT apply_bitpos = 0; 1439 tree type; 1440 1441 /* It can happen that type_for_mode was given a mode for which there 1442 is no language-level type. In which case it returns NULL, which 1443 we can see here. */ 1444 if (t == NULL_TREE) 1445 return; 1446 1447 type = TYPE_P (t) ? t : TREE_TYPE (t); 1448 if (type == error_mark_node) 1449 return; 1450 1451 /* If we have already set DECL_RTL = ref, get_alias_set will get the 1452 wrong answer, as it assumes that DECL_RTL already has the right alias 1453 info. Callers should not set DECL_RTL until after the call to 1454 set_mem_attributes. */
|
1588 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1589 abort ();
|
1455 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t)); |
1456 1457 /* Get the alias set from the expression or type (perhaps using a 1458 front-end routine) and use it. */ 1459 alias = get_alias_set (t); 1460
|
1595 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
|
1461 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type); |
1462 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
|
1597 RTX_UNCHANGING_P (ref)
1598 |= ((lang_hooks.honor_readonly
1599 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1600 || (! TYPE_P (t) && TREE_CONSTANT (t)));
|
1463 MEM_POINTER (ref) = POINTER_TYPE_P (type); |
1464 1465 /* If we are making an object of this type, or if this is a DECL, we know 1466 that it is a scalar if the type is not an aggregate. */ 1467 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type)) 1468 MEM_SCALAR_P (ref) = 1; 1469 1470 /* We can set the alignment from the type if we are making an object, 1471 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
|
1609 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
|
1472 if (objectp || TREE_CODE (t) == INDIRECT_REF 1473 || TREE_CODE (t) == ALIGN_INDIRECT_REF 1474 || TYPE_ALIGN_OK (type)) |
1475 align = MAX (align, TYPE_ALIGN (type));
|
1476 else 1477 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF) 1478 { 1479 if (integer_zerop (TREE_OPERAND (t, 1))) 1480 /* We don't know anything about the alignment. */ 1481 align = BITS_PER_UNIT; 1482 else 1483 align = tree_low_cst (TREE_OPERAND (t, 1), 1); 1484 } |
1485 1486 /* If the size is known, we can set that. */ 1487 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1)) 1488 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1)); 1489 1490 /* If T is not a type, we may be able to deduce some more information about 1491 the expression. */ 1492 if (! TYPE_P (t)) 1493 {
|
1620 maybe_set_unchanging (ref, t);
|
1494 tree base; 1495 |
1496 if (TREE_THIS_VOLATILE (t)) 1497 MEM_VOLATILE_P (ref) = 1; 1498 1499 /* Now remove any conversions: they don't change what the underlying 1500 object is. Likewise for SAVE_EXPR. */ 1501 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR 1502 || TREE_CODE (t) == NON_LVALUE_EXPR 1503 || TREE_CODE (t) == VIEW_CONVERT_EXPR 1504 || TREE_CODE (t) == SAVE_EXPR) 1505 t = TREE_OPERAND (t, 0); 1506
|
1632 /* If this expression can't be addressed (e.g., it contains a reference
1633 to a non-addressable field), show we don't change its alias set. */
1634 if (! can_address_p (t))
|
1507 /* We may look through structure-like accesses for the purposes of 1508 examining TREE_THIS_NOTRAP, but not array-like accesses. */ 1509 base = t; 1510 while (TREE_CODE (base) == COMPONENT_REF 1511 || TREE_CODE (base) == REALPART_EXPR 1512 || TREE_CODE (base) == IMAGPART_EXPR 1513 || TREE_CODE (base) == BIT_FIELD_REF) 1514 base = TREE_OPERAND (base, 0); 1515 1516 if (DECL_P (base)) 1517 { 1518 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS)) 1519 MEM_NOTRAP_P (ref) = !DECL_WEAK (base); 1520 else 1521 MEM_NOTRAP_P (ref) = 1; 1522 } 1523 else 1524 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base); 1525 1526 base = get_base_address (base); 1527 if (base && DECL_P (base) 1528 && TREE_READONLY (base) 1529 && (TREE_STATIC (base) || DECL_EXTERNAL (base))) 1530 { 1531 tree base_type = TREE_TYPE (base); 1532 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type)) 1533 || DECL_ARTIFICIAL (base)); 1534 MEM_READONLY_P (ref) = 1; 1535 } 1536 1537 /* If this expression uses it's parent's alias set, mark it such 1538 that we won't change it. */ 1539 if (component_uses_parent_alias_set (t)) |
1540 MEM_KEEP_ALIAS_SET_P (ref) = 1; 1541 1542 /* If this is a decl, set the attributes of the MEM from it. */ 1543 if (DECL_P (t)) 1544 { 1545 expr = t; 1546 offset = const0_rtx; 1547 apply_bitpos = bitpos; 1548 size = (DECL_SIZE_UNIT (t) 1549 && host_integerp (DECL_SIZE_UNIT (t), 1) 1550 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0); 1551 align = DECL_ALIGN (t); 1552 } 1553 1554 /* If this is a constant, we know the alignment. */
|
1650 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
|
1555 else if (CONSTANT_CLASS_P (t)) |
1556 { 1557 align = TYPE_ALIGN (type); 1558#ifdef CONSTANT_ALIGNMENT 1559 align = CONSTANT_ALIGNMENT (t, align); 1560#endif 1561 } 1562 1563 /* If this is a field reference and not a bit-field, record it. */ 1564 /* ??? There is some information that can be gleened from bit-fields, 1565 such as the word offset in the structure that might be modified. 1566 But skip it for now. */ 1567 else if (TREE_CODE (t) == COMPONENT_REF 1568 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1))) 1569 { 1570 expr = component_ref_for_mem_expr (t); 1571 offset = const0_rtx; 1572 apply_bitpos = bitpos; 1573 /* ??? Any reason the field size would be different than 1574 the size we got from the type? */ 1575 } 1576 1577 /* If this is an array reference, look for an outer field reference. */ 1578 else if (TREE_CODE (t) == ARRAY_REF) 1579 { 1580 tree off_tree = size_zero_node; 1581 /* We can't modify t, because we use it at the end of the 1582 function. */ 1583 tree t2 = t; 1584 1585 do 1586 { 1587 tree index = TREE_OPERAND (t2, 1);
|
1683 tree array = TREE_OPERAND (t2, 0);
1684 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1685 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1686 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
|
1588 tree low_bound = array_ref_low_bound (t2); 1589 tree unit_size = array_ref_element_size (t2); |
1590 1591 /* We assume all arrays have sizes that are a multiple of a byte. 1592 First subtract the lower bound, if any, in the type of the
|
1690 index, then convert to sizetype and multiply by the size of the
1691 array element. */
1692 if (low_bound != 0 && ! integer_zerop (low_bound))
1693 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1694 index, low_bound));
|
1593 index, then convert to sizetype and multiply by the size of 1594 the array element. */ 1595 if (! integer_zerop (low_bound)) 1596 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), 1597 index, low_bound); |
1598
|
1696 /* If the index has a self-referential type, pass it to a
1697 WITH_RECORD_EXPR; if the component size is, pass our
1698 component to one. */
1699 if (CONTAINS_PLACEHOLDER_P (index))
1700 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
1701 if (CONTAINS_PLACEHOLDER_P (unit_size))
1702 unit_size = build (WITH_RECORD_EXPR, sizetype,
1703 unit_size, array);
1704
1705 off_tree
1706 = fold (build (PLUS_EXPR, sizetype,
1707 fold (build (MULT_EXPR, sizetype,
1708 index,
1709 unit_size)),
1710 off_tree));
|
1599 off_tree = size_binop (PLUS_EXPR, 1600 size_binop (MULT_EXPR, 1601 fold_convert (sizetype, 1602 index), 1603 unit_size), 1604 off_tree); |
1605 t2 = TREE_OPERAND (t2, 0); 1606 } 1607 while (TREE_CODE (t2) == ARRAY_REF); 1608 1609 if (DECL_P (t2)) 1610 { 1611 expr = t2; 1612 offset = NULL; 1613 if (host_integerp (off_tree, 1)) 1614 { 1615 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1); 1616 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT; 1617 align = DECL_ALIGN (t2); 1618 if (aoff && (unsigned HOST_WIDE_INT) aoff < align) 1619 align = aoff; 1620 offset = GEN_INT (ioff); 1621 apply_bitpos = bitpos; 1622 } 1623 } 1624 else if (TREE_CODE (t2) == COMPONENT_REF) 1625 { 1626 expr = component_ref_for_mem_expr (t2); 1627 if (host_integerp (off_tree, 1)) 1628 { 1629 offset = GEN_INT (tree_low_cst (off_tree, 1)); 1630 apply_bitpos = bitpos; 1631 } 1632 /* ??? Any reason the field size would be different than 1633 the size we got from the type? */ 1634 } 1635 else if (flag_argument_noalias > 1
|
1742 && TREE_CODE (t2) == INDIRECT_REF
|
1636 && (INDIRECT_REF_P (t2)) |
1637 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL) 1638 { 1639 expr = t2; 1640 offset = NULL; 1641 } 1642 } 1643 1644 /* If this is a Fortran indirect argument reference, record the 1645 parameter decl. */ 1646 else if (flag_argument_noalias > 1
|
1753 && TREE_CODE (t) == INDIRECT_REF
|
1647 && (INDIRECT_REF_P (t)) |
1648 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL) 1649 { 1650 expr = t; 1651 offset = NULL; 1652 } 1653 } 1654 1655 /* If we modified OFFSET based on T, then subtract the outstanding 1656 bit position offset. Similarly, increase the size of the accessed 1657 object to contain the negative offset. */ 1658 if (apply_bitpos) 1659 { 1660 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT)); 1661 if (size) 1662 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT); 1663 } 1664
|
1665 if (TREE_CODE (t) == ALIGN_INDIRECT_REF) 1666 { 1667 /* Force EXPR and OFFSE to NULL, since we don't know exactly what 1668 we're overlapping. */ 1669 offset = NULL; 1670 expr = NULL; 1671 } 1672 |
1673 /* Now set the attributes we computed above. */ 1674 MEM_ATTRS (ref) 1675 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref)); 1676 1677 /* If this is already known to be a scalar or aggregate, we are done. */ 1678 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref)) 1679 return; 1680 1681 /* If it is a reference into an aggregate, this is part of an aggregate. 1682 Otherwise we don't know. */ 1683 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF 1684 || TREE_CODE (t) == ARRAY_RANGE_REF 1685 || TREE_CODE (t) == BIT_FIELD_REF) 1686 MEM_IN_STRUCT_P (ref) = 1; 1687} 1688 1689void 1690set_mem_attributes (rtx ref, tree t, int objectp) 1691{ 1692 set_mem_attributes_minus_bitpos (ref, t, objectp, 0); 1693} 1694 1695/* Set the decl for MEM to DECL. */ 1696 1697void 1698set_mem_attrs_from_reg (rtx mem, rtx reg) 1699{ 1700 MEM_ATTRS (mem) 1701 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg), 1702 GEN_INT (REG_OFFSET (reg)), 1703 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem)); 1704} 1705 1706/* Set the alias set of MEM to SET. */ 1707 1708void 1709set_mem_alias_set (rtx mem, HOST_WIDE_INT set) 1710{ 1711#ifdef ENABLE_CHECKING 1712 /* If the new and old alias sets don't conflict, something is wrong. */
|
1811 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1812 abort ();
|
1713 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); |
1714#endif 1715 1716 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem), 1717 MEM_SIZE (mem), MEM_ALIGN (mem), 1718 GET_MODE (mem)); 1719} 1720 1721/* Set the alignment of MEM to ALIGN bits. */ 1722 1723void 1724set_mem_align (rtx mem, unsigned int align) 1725{ 1726 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem), 1727 MEM_OFFSET (mem), MEM_SIZE (mem), align, 1728 GET_MODE (mem)); 1729} 1730 1731/* Set the expr for MEM to EXPR. */ 1732 1733void 1734set_mem_expr (rtx mem, tree expr) 1735{ 1736 MEM_ATTRS (mem) 1737 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem), 1738 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem)); 1739} 1740 1741/* Set the offset of MEM to OFFSET. */ 1742 1743void 1744set_mem_offset (rtx mem, rtx offset) 1745{ 1746 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem), 1747 offset, MEM_SIZE (mem), MEM_ALIGN (mem), 1748 GET_MODE (mem)); 1749} 1750 1751/* Set the size of MEM to SIZE. */ 1752 1753void 1754set_mem_size (rtx mem, rtx size) 1755{ 1756 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem), 1757 MEM_OFFSET (mem), size, MEM_ALIGN (mem), 1758 GET_MODE (mem)); 1759} 1760 1761/* Return a memory reference like MEMREF, but with its mode changed to MODE 1762 and its address changed to ADDR. (VOIDmode means don't change the mode. 1763 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the 1764 returned memory location is required to be valid. The memory 1765 attributes are not changed. */ 1766 1767static rtx 1768change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate) 1769{ 1770 rtx new; 1771
|
1871 if (GET_CODE (memref) != MEM)
1872 abort ();
|
1772 gcc_assert (MEM_P (memref)); |
1773 if (mode == VOIDmode) 1774 mode = GET_MODE (memref); 1775 if (addr == 0) 1776 addr = XEXP (memref, 0); 1777 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0) 1778 && (!validate || memory_address_p (mode, addr))) 1779 return memref; 1780 1781 if (validate) 1782 { 1783 if (reload_in_progress || reload_completed)
|
1884 {
1885 if (! memory_address_p (mode, addr))
1886 abort ();
1887 }
|
1784 gcc_assert (memory_address_p (mode, addr)); |
1785 else 1786 addr = memory_address (mode, addr); 1787 } 1788 1789 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref)) 1790 return memref; 1791 1792 new = gen_rtx_MEM (mode, addr); 1793 MEM_COPY_ATTRIBUTES (new, memref); 1794 return new; 1795} 1796 1797/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what 1798 way we are changing MEMREF, so we only preserve the alias set. */ 1799 1800rtx 1801change_address (rtx memref, enum machine_mode mode, rtx addr) 1802{ 1803 rtx new = change_address_1 (memref, mode, addr, 1), size; 1804 enum machine_mode mmode = GET_MODE (new); 1805 unsigned int align; 1806 1807 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)); 1808 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode); 1809 1810 /* If there are no changes, just return the original memory reference. */ 1811 if (new == memref) 1812 { 1813 if (MEM_ATTRS (memref) == 0 1814 || (MEM_EXPR (memref) == NULL 1815 && MEM_OFFSET (memref) == NULL 1816 && MEM_SIZE (memref) == size 1817 && MEM_ALIGN (memref) == align)) 1818 return new; 1819 1820 new = gen_rtx_MEM (mmode, XEXP (memref, 0)); 1821 MEM_COPY_ATTRIBUTES (new, memref); 1822 } 1823 1824 MEM_ATTRS (new) 1825 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode); 1826 1827 return new; 1828} 1829 1830/* Return a memory reference like MEMREF, but with its mode changed 1831 to MODE and its address offset by OFFSET bytes. If VALIDATE is 1832 nonzero, the memory address is forced to be valid. 1833 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS 1834 and caller is responsible for adjusting MEMREF base register. 
*/ 1835 1836rtx 1837adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset, 1838 int validate, int adjust) 1839{ 1840 rtx addr = XEXP (memref, 0); 1841 rtx new; 1842 rtx memoffset = MEM_OFFSET (memref); 1843 rtx size = 0; 1844 unsigned int memalign = MEM_ALIGN (memref); 1845 1846 /* If there are no changes, just return the original memory reference. */ 1847 if (mode == GET_MODE (memref) && !offset 1848 && (!validate || memory_address_p (mode, addr))) 1849 return memref; 1850 1851 /* ??? Prefer to create garbage instead of creating shared rtl. 1852 This may happen even if offset is nonzero -- consider 1853 (plus (plus reg reg) const_int) -- so do this always. */ 1854 addr = copy_rtx (addr); 1855 1856 if (adjust) 1857 { 1858 /* If MEMREF is a LO_SUM and the offset is within the alignment of the 1859 object, we can merge it into the LO_SUM. */ 1860 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM 1861 && offset >= 0 1862 && (unsigned HOST_WIDE_INT) offset 1863 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT) 1864 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0), 1865 plus_constant (XEXP (addr, 1), offset)); 1866 else 1867 addr = plus_constant (addr, offset); 1868 } 1869 1870 new = change_address_1 (memref, mode, addr, validate); 1871 1872 /* Compute the new values of the memory attributes due to this adjustment. 1873 We add the offsets and update the alignment. */ 1874 if (memoffset) 1875 memoffset = GEN_INT (offset + INTVAL (memoffset)); 1876 1877 /* Compute the new alignment by taking the MIN of the alignment and the 1878 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET 1879 if zero. */ 1880 if (offset != 0) 1881 memalign 1882 = MIN (memalign, 1883 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT); 1884 1885 /* We can compute the size in a number of ways. 
*/ 1886 if (GET_MODE (new) != BLKmode) 1887 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new))); 1888 else if (MEM_SIZE (memref)) 1889 size = plus_constant (MEM_SIZE (memref), -offset); 1890 1891 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 1892 memoffset, size, memalign, GET_MODE (new)); 1893 1894 /* At some point, we should validate that this offset is within the object, 1895 if all the appropriate values are known. */ 1896 return new; 1897} 1898 1899/* Return a memory reference like MEMREF, but with its mode changed 1900 to MODE and its address changed to ADDR, which is assumed to be 1901 MEMREF offseted by OFFSET bytes. If VALIDATE is 1902 nonzero, the memory address is forced to be valid. */ 1903 1904rtx 1905adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr, 1906 HOST_WIDE_INT offset, int validate) 1907{ 1908 memref = change_address_1 (memref, VOIDmode, addr, validate); 1909 return adjust_address_1 (memref, mode, offset, validate, 0); 1910} 1911 1912/* Return a memory reference like MEMREF, but whose address is changed by 1913 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor 1914 known to be in OFFSET (possibly 1). */ 1915 1916rtx 1917offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2) 1918{ 1919 rtx new, addr = XEXP (memref, 0); 1920 1921 new = simplify_gen_binary (PLUS, Pmode, addr, offset); 1922 1923 /* At this point we don't know _why_ the address is invalid. It 1924 could have secondary memory references, multiplies or anything. 1925 1926 However, if we did go and rearrange things, we can wind up not 1927 being able to recognize the magic around pic_offset_table_rtx. 1928 This stuff is fragile, and is yet another example of why it is 1929 bad to expose PIC machinery too early. */ 1930 if (! 
memory_address_p (GET_MODE (memref), new) 1931 && GET_CODE (addr) == PLUS 1932 && XEXP (addr, 0) == pic_offset_table_rtx) 1933 { 1934 addr = force_reg (GET_MODE (addr), addr); 1935 new = simplify_gen_binary (PLUS, Pmode, addr, offset); 1936 } 1937 1938 update_temp_slot_address (XEXP (memref, 0), new); 1939 new = change_address_1 (memref, VOIDmode, new, 1); 1940 1941 /* If there are no changes, just return the original memory reference. */ 1942 if (new == memref) 1943 return new; 1944 1945 /* Update the alignment to reflect the offset. Reset the offset, which 1946 we don't know. */ 1947 MEM_ATTRS (new) 1948 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0, 1949 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT), 1950 GET_MODE (new)); 1951 return new; 1952} 1953 1954/* Return a memory reference like MEMREF, but with its address changed to 1955 ADDR. The caller is asserting that the actual piece of memory pointed 1956 to is the same, just the form of the address is being changed, such as 1957 by putting something into a register. */ 1958 1959rtx 1960replace_equiv_address (rtx memref, rtx addr) 1961{ 1962 /* change_address_1 copies the memory attribute structure without change 1963 and that's exactly what we want here. */ 1964 update_temp_slot_address (XEXP (memref, 0), addr); 1965 return change_address_1 (memref, VOIDmode, addr, 1); 1966} 1967 1968/* Likewise, but the reference is not required to be valid. */ 1969 1970rtx 1971replace_equiv_address_nv (rtx memref, rtx addr) 1972{ 1973 return change_address_1 (memref, VOIDmode, addr, 0); 1974} 1975 1976/* Return a memory reference like MEMREF, but with its mode widened to 1977 MODE and offset by OFFSET. This would be used by targets that e.g. 1978 cannot issue QImode memory operations and have to use SImode memory 1979 operations plus masking logic. 
*/ 1980 1981rtx 1982widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset) 1983{ 1984 rtx new = adjust_address_1 (memref, mode, offset, 1, 1); 1985 tree expr = MEM_EXPR (new); 1986 rtx memoffset = MEM_OFFSET (new); 1987 unsigned int size = GET_MODE_SIZE (mode); 1988 1989 /* If there are no changes, just return the original memory reference. */ 1990 if (new == memref) 1991 return new; 1992 1993 /* If we don't know what offset we were at within the expression, then 1994 we can't know if we've overstepped the bounds. */ 1995 if (! memoffset) 1996 expr = NULL_TREE; 1997 1998 while (expr) 1999 { 2000 if (TREE_CODE (expr) == COMPONENT_REF) 2001 { 2002 tree field = TREE_OPERAND (expr, 1);
|
2003 tree offset = component_ref_field_offset (expr); |
2004 2005 if (! DECL_SIZE_UNIT (field)) 2006 { 2007 expr = NULL_TREE; 2008 break; 2009 } 2010 2011 /* Is the field at least as large as the access? If so, ok, 2012 otherwise strip back to the containing structure. */ 2013 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST 2014 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0 2015 && INTVAL (memoffset) >= 0) 2016 break; 2017
|
2120 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
|
2018 if (! host_integerp (offset, 1)) |
2019 { 2020 expr = NULL_TREE; 2021 break; 2022 } 2023 2024 expr = TREE_OPERAND (expr, 0);
|
2127 memoffset = (GEN_INT (INTVAL (memoffset)
2128 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2129 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2130 / BITS_PER_UNIT)));
|
2025 memoffset 2026 = (GEN_INT (INTVAL (memoffset) 2027 + tree_low_cst (offset, 1) 2028 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1) 2029 / BITS_PER_UNIT))); |
2030 } 2031 /* Similarly for the decl. */ 2032 else if (DECL_P (expr) 2033 && DECL_SIZE_UNIT (expr) 2034 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST 2035 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0 2036 && (! memoffset || INTVAL (memoffset) >= 0)) 2037 break; 2038 else 2039 { 2040 /* The widened memory access overflows the expression, which means 2041 that it could alias another expression. Zap it. */ 2042 expr = NULL_TREE; 2043 break; 2044 } 2045 } 2046 2047 if (! expr) 2048 memoffset = NULL_RTX; 2049 2050 /* The widened memory may alias other stuff, so zap the alias set. */ 2051 /* ??? Maybe use get_alias_set on any remaining expression. */ 2052 2053 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size), 2054 MEM_ALIGN (new), mode); 2055 2056 return new; 2057} 2058 2059/* Return a newly created CODE_LABEL rtx with a unique label number. */ 2060 2061rtx 2062gen_label_rtx (void) 2063{ 2064 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX, 2065 NULL, label_num++, NULL); 2066} 2067 2068/* For procedure integration. */ 2069 2070/* Install new pointers to the first and last insns in the chain. 2071 Also, set cur_insn_uid to one higher than the last in use. 2072 Used for an inline-procedure after copying the insn chain. */ 2073 2074void 2075set_new_first_and_last_insn (rtx first, rtx last) 2076{ 2077 rtx insn; 2078 2079 first_insn = first; 2080 last_insn = last; 2081 cur_insn_uid = 0; 2082 2083 for (insn = first; insn; insn = NEXT_INSN (insn)) 2084 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn)); 2085 2086 cur_insn_uid++; 2087}
|
2189
2190/* Set the last label number found in the current function.
2191 This is used when belatedly compiling an inline function. */
2192
2193void
2194set_new_last_label_num (int last)
2195{
2196 base_label_num = label_num;
2197 last_label_num = last;
2198}
|
2088
|
2200/* Restore all variables describing the current status from the structure *P.
2201 This is used after a nested function. */
2202
2203void
2204restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
2205{
2206 last_label_num = 0;
2207}
2208
|
2089/* Go through all the RTL insn bodies and copy any invalid shared 2090 structure. This routine should only be called once. */ 2091
|
2212void
2213unshare_all_rtl (tree fndecl, rtx insn)
|
2092static void 2093unshare_all_rtl_1 (tree fndecl, rtx insn) |
2094{ 2095 tree decl; 2096 2097 /* Make sure that virtual parameters are not shared. */ 2098 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl)) 2099 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl))); 2100 2101 /* Make sure that virtual stack slots are not shared. */ 2102 unshare_all_decls (DECL_INITIAL (fndecl)); 2103 2104 /* Unshare just about everything else. */ 2105 unshare_all_rtl_in_chain (insn); 2106 2107 /* Make sure the addresses of stack slots found outside the insn chain 2108 (such as, in DECL_RTL of a variable) are not shared 2109 with the insn chain. 2110 2111 This special care is necessary when the stack slot MEM does not 2112 actually appear in the insn chain. If it does appear, its address 2113 is unshared from all else at that point. */ 2114 stack_slot_list = copy_rtx_if_shared (stack_slot_list); 2115} 2116 2117/* Go through all the RTL insn bodies and copy any invalid shared 2118 structure, again. This is a fairly expensive thing to do so it 2119 should be done sparingly. */ 2120 2121void 2122unshare_all_rtl_again (rtx insn) 2123{ 2124 rtx p; 2125 tree decl; 2126 2127 for (p = insn; p; p = NEXT_INSN (p)) 2128 if (INSN_P (p)) 2129 { 2130 reset_used_flags (PATTERN (p)); 2131 reset_used_flags (REG_NOTES (p)); 2132 reset_used_flags (LOG_LINKS (p)); 2133 } 2134 2135 /* Make sure that virtual stack slots are not shared. */ 2136 reset_used_decls (DECL_INITIAL (cfun->decl)); 2137 2138 /* Make sure that virtual parameters are not shared. */ 2139 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl)) 2140 reset_used_flags (DECL_RTL (decl)); 2141 2142 reset_used_flags (stack_slot_list); 2143
|
2264 unshare_all_rtl (cfun->decl, insn);
|
2144 unshare_all_rtl_1 (cfun->decl, insn); |
2145} 2146
|
2147unsigned int 2148unshare_all_rtl (void) 2149{ 2150 unshare_all_rtl_1 (current_function_decl, get_insns ()); 2151 return 0; 2152} 2153 2154struct tree_opt_pass pass_unshare_all_rtl = 2155{ 2156 "unshare", /* name */ 2157 NULL, /* gate */ 2158 unshare_all_rtl, /* execute */ 2159 NULL, /* sub */ 2160 NULL, /* next */ 2161 0, /* static_pass_number */ 2162 0, /* tv_id */ 2163 0, /* properties_required */ 2164 0, /* properties_provided */ 2165 0, /* properties_destroyed */ 2166 0, /* todo_flags_start */ 2167 TODO_dump_func, /* todo_flags_finish */ 2168 0 /* letter */ 2169}; 2170 2171 |
2172/* Check that ORIG is not marked when it should not be and mark ORIG as in use, 2173 Recursively does the same for subexpressions. */ 2174 2175static void 2176verify_rtx_sharing (rtx orig, rtx insn) 2177{ 2178 rtx x = orig; 2179 int i; 2180 enum rtx_code code; 2181 const char *format_ptr; 2182 2183 if (x == 0) 2184 return; 2185 2186 code = GET_CODE (x); 2187 2188 /* These types may be freely shared. */ 2189 2190 switch (code) 2191 { 2192 case REG:
|
2288 case QUEUED:
|
2193 case CONST_INT: 2194 case CONST_DOUBLE: 2195 case CONST_VECTOR: 2196 case SYMBOL_REF: 2197 case LABEL_REF: 2198 case CODE_LABEL: 2199 case PC: 2200 case CC0: 2201 case SCRATCH:
|
2298 /* SCRATCH must be shared because they represent distinct values. */
|
2202 return;
|
2203 /* SCRATCH must be shared because they represent distinct values. */ 2204 case CLOBBER: 2205 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) 2206 return; 2207 break; |
2208 2209 case CONST: 2210 /* CONST can be shared if it contains a SYMBOL_REF. If it contains 2211 a LABEL_REF, it isn't sharable. */ 2212 if (GET_CODE (XEXP (x, 0)) == PLUS 2213 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 2214 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT) 2215 return; 2216 break; 2217 2218 case MEM: 2219 /* A MEM is allowed to be shared if its address is constant. */ 2220 if (CONSTANT_ADDRESS_P (XEXP (x, 0)) 2221 || reload_completed || reload_in_progress) 2222 return; 2223 2224 break; 2225 2226 default: 2227 break; 2228 } 2229 2230 /* This rtx may not be shared. If it has already been seen, 2231 replace it with a copy of itself. */
|
2324
|
2232#ifdef ENABLE_CHECKING |
2233 if (RTX_FLAG (x, used)) 2234 {
|
2327 error ("Invalid rtl sharing found in the insn");
|
2235 error ("invalid rtl sharing found in the insn"); |
2236 debug_rtx (insn);
|
2329 error ("Shared rtx");
|
2237 error ("shared rtx"); |
2238 debug_rtx (x);
|
2331 abort ();
|
2239 internal_error ("internal consistency failure"); |
2240 }
|
2241#endif 2242 gcc_assert (!RTX_FLAG (x, used)); 2243 |
2244 RTX_FLAG (x, used) = 1; 2245 2246 /* Now scan the subexpressions recursively. */ 2247 2248 format_ptr = GET_RTX_FORMAT (code); 2249 2250 for (i = 0; i < GET_RTX_LENGTH (code); i++) 2251 { 2252 switch (*format_ptr++) 2253 { 2254 case 'e': 2255 verify_rtx_sharing (XEXP (x, i), insn); 2256 break; 2257 2258 case 'E': 2259 if (XVEC (x, i) != NULL) 2260 { 2261 int j; 2262 int len = XVECLEN (x, i); 2263 2264 for (j = 0; j < len; j++) 2265 {
|
2355 /* We allow sharing of ASM_OPERANDS inside single instruction. */
|
2266 /* We allow sharing of ASM_OPERANDS inside single 2267 instruction. */ |
2268 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
|
2357 && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
|
2269 && (GET_CODE (SET_SRC (XVECEXP (x, i, j))) 2270 == ASM_OPERANDS)) |
2271 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn); 2272 else 2273 verify_rtx_sharing (XVECEXP (x, i, j), insn); 2274 } 2275 } 2276 break; 2277 } 2278 } 2279 return; 2280} 2281 2282/* Go through all the RTL insn bodies and check that there is no unexpected 2283 sharing in between the subexpressions. */ 2284 2285void 2286verify_rtl_sharing (void) 2287{ 2288 rtx p; 2289 2290 for (p = get_insns (); p; p = NEXT_INSN (p)) 2291 if (INSN_P (p)) 2292 { 2293 reset_used_flags (PATTERN (p)); 2294 reset_used_flags (REG_NOTES (p)); 2295 reset_used_flags (LOG_LINKS (p)); 2296 } 2297 2298 for (p = get_insns (); p; p = NEXT_INSN (p)) 2299 if (INSN_P (p)) 2300 { 2301 verify_rtx_sharing (PATTERN (p), p); 2302 verify_rtx_sharing (REG_NOTES (p), p); 2303 verify_rtx_sharing (LOG_LINKS (p), p); 2304 } 2305} 2306 2307/* Go through all the RTL insn bodies and copy any invalid shared structure. 2308 Assumes the mark bits are cleared at entry. */ 2309 2310void 2311unshare_all_rtl_in_chain (rtx insn) 2312{ 2313 for (; insn; insn = NEXT_INSN (insn)) 2314 if (INSN_P (insn)) 2315 { 2316 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn)); 2317 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn)); 2318 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn)); 2319 } 2320} 2321 2322/* Go through all virtual stack slots of a function and copy any 2323 shared structure. */ 2324static void 2325unshare_all_decls (tree blk) 2326{ 2327 tree t; 2328 2329 /* Copy shared decls. */ 2330 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t)) 2331 if (DECL_RTL_SET_P (t)) 2332 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t))); 2333 2334 /* Now process sub-blocks. */ 2335 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t)) 2336 unshare_all_decls (t); 2337} 2338 2339/* Go through all virtual stack slots of a function and mark them as 2340 not shared. */ 2341static void 2342reset_used_decls (tree blk) 2343{ 2344 tree t; 2345 2346 /* Mark decls. 
*/ 2347 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t)) 2348 if (DECL_RTL_SET_P (t)) 2349 reset_used_flags (DECL_RTL (t)); 2350 2351 /* Now process sub-blocks. */ 2352 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t)) 2353 reset_used_decls (t); 2354} 2355
|
2443/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2444 placed in the result directly, rather than being copied. MAY_SHARE is
2445 either a MEM of an EXPR_LIST of MEMs. */
2446
2447rtx
2448copy_most_rtx (rtx orig, rtx may_share)
2449{
2450 rtx copy;
2451 int i, j;
2452 RTX_CODE code;
2453 const char *format_ptr;
2454
2455 if (orig == may_share
2456 || (GET_CODE (may_share) == EXPR_LIST
2457 && in_expr_list_p (may_share, orig)))
2458 return orig;
2459
2460 code = GET_CODE (orig);
2461
2462 switch (code)
2463 {
2464 case REG:
2465 case QUEUED:
2466 case CONST_INT:
2467 case CONST_DOUBLE:
2468 case CONST_VECTOR:
2469 case SYMBOL_REF:
2470 case CODE_LABEL:
2471 case PC:
2472 case CC0:
2473 return orig;
2474 default:
2475 break;
2476 }
2477
2478 copy = rtx_alloc (code);
2479 PUT_MODE (copy, GET_MODE (orig));
2480 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2481 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2482 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2483 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2484 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2485
2486 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2487
2488 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2489 {
2490 switch (*format_ptr++)
2491 {
2492 case 'e':
2493 XEXP (copy, i) = XEXP (orig, i);
2494 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2495 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2496 break;
2497
2498 case 'u':
2499 XEXP (copy, i) = XEXP (orig, i);
2500 break;
2501
2502 case 'E':
2503 case 'V':
2504 XVEC (copy, i) = XVEC (orig, i);
2505 if (XVEC (orig, i) != NULL)
2506 {
2507 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2508 for (j = 0; j < XVECLEN (copy, i); j++)
2509 XVECEXP (copy, i, j)
2510 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2511 }
2512 break;
2513
2514 case 'w':
2515 XWINT (copy, i) = XWINT (orig, i);
2516 break;
2517
2518 case 'n':
2519 case 'i':
2520 XINT (copy, i) = XINT (orig, i);
2521 break;
2522
2523 case 't':
2524 XTREE (copy, i) = XTREE (orig, i);
2525 break;
2526
2527 case 's':
2528 case 'S':
2529 XSTR (copy, i) = XSTR (orig, i);
2530 break;
2531
2532 case '0':
2533 X0ANY (copy, i) = X0ANY (orig, i);
2534 break;
2535
2536 default:
2537 abort ();
2538 }
2539 }
2540 return copy;
2541}
2542
|
2356/* Mark ORIG as in use, and return a copy of it if it was already in use. 2357 Recursively does the same for subexpressions. Uses 2358 copy_rtx_if_shared_1 to reduce stack space. */ 2359 2360rtx 2361copy_rtx_if_shared (rtx orig) 2362{ 2363 copy_rtx_if_shared_1 (&orig); 2364 return orig; 2365} 2366 2367/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in 2368 use. Recursively does the same for subexpressions. */ 2369 2370static void 2371copy_rtx_if_shared_1 (rtx *orig1) 2372{ 2373 rtx x; 2374 int i; 2375 enum rtx_code code; 2376 rtx *last_ptr; 2377 const char *format_ptr; 2378 int copied = 0; 2379 int length; 2380 2381 /* Repeat is used to turn tail-recursion into iteration. */ 2382repeat: 2383 x = *orig1; 2384 2385 if (x == 0) 2386 return; 2387 2388 code = GET_CODE (x); 2389 2390 /* These types may be freely shared. */ 2391 2392 switch (code) 2393 { 2394 case REG:
|
2582 case QUEUED:
|
2395 case CONST_INT: 2396 case CONST_DOUBLE: 2397 case CONST_VECTOR: 2398 case SYMBOL_REF: 2399 case LABEL_REF: 2400 case CODE_LABEL: 2401 case PC: 2402 case CC0: 2403 case SCRATCH: 2404 /* SCRATCH must be shared because they represent distinct values. */ 2405 return;
|
2406 case CLOBBER: 2407 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) 2408 return; 2409 break; |
2410 2411 case CONST: 2412 /* CONST can be shared if it contains a SYMBOL_REF. If it contains 2413 a LABEL_REF, it isn't sharable. */ 2414 if (GET_CODE (XEXP (x, 0)) == PLUS 2415 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 2416 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT) 2417 return; 2418 break; 2419 2420 case INSN: 2421 case JUMP_INSN: 2422 case CALL_INSN: 2423 case NOTE: 2424 case BARRIER: 2425 /* The chain of insns is not being copied. */ 2426 return; 2427 2428 default: 2429 break; 2430 } 2431 2432 /* This rtx may not be shared. If it has already been seen, 2433 replace it with a copy of itself. */ 2434 2435 if (RTX_FLAG (x, used)) 2436 {
|
2621 rtx copy;
2622
2623 copy = rtx_alloc (code);
2624 memcpy (copy, x, RTX_SIZE (code));
2625 x = copy;
|
2437 x = shallow_copy_rtx (x); |
2438 copied = 1; 2439 } 2440 RTX_FLAG (x, used) = 1; 2441 2442 /* Now scan the subexpressions recursively. 2443 We can store any replaced subexpressions directly into X 2444 since we know X is not shared! Any vectors in X 2445 must be copied if X was copied. */ 2446 2447 format_ptr = GET_RTX_FORMAT (code); 2448 length = GET_RTX_LENGTH (code); 2449 last_ptr = NULL; 2450 2451 for (i = 0; i < length; i++) 2452 { 2453 switch (*format_ptr++) 2454 { 2455 case 'e': 2456 if (last_ptr) 2457 copy_rtx_if_shared_1 (last_ptr); 2458 last_ptr = &XEXP (x, i); 2459 break; 2460 2461 case 'E': 2462 if (XVEC (x, i) != NULL) 2463 { 2464 int j; 2465 int len = XVECLEN (x, i); 2466 2467 /* Copy the vector iff I copied the rtx and the length 2468 is nonzero. */ 2469 if (copied && len > 0) 2470 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem); 2471 2472 /* Call recursively on all inside the vector. */ 2473 for (j = 0; j < len; j++) 2474 { 2475 if (last_ptr) 2476 copy_rtx_if_shared_1 (last_ptr); 2477 last_ptr = &XVECEXP (x, i, j); 2478 } 2479 } 2480 break; 2481 } 2482 } 2483 *orig1 = x; 2484 if (last_ptr) 2485 { 2486 orig1 = last_ptr; 2487 goto repeat; 2488 } 2489 return; 2490} 2491 2492/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used 2493 to look for shared sub-parts. */ 2494 2495void 2496reset_used_flags (rtx x) 2497{ 2498 int i, j; 2499 enum rtx_code code; 2500 const char *format_ptr; 2501 int length; 2502 2503 /* Repeat is used to turn tail-recursion into iteration. */ 2504repeat: 2505 if (x == 0) 2506 return; 2507 2508 code = GET_CODE (x); 2509 2510 /* These types may be freely shared so we needn't do any resetting 2511 for them. */ 2512 2513 switch (code) 2514 { 2515 case REG:
|
2704 case QUEUED:
|
2516 case CONST_INT: 2517 case CONST_DOUBLE: 2518 case CONST_VECTOR: 2519 case SYMBOL_REF: 2520 case CODE_LABEL: 2521 case PC: 2522 case CC0: 2523 return; 2524 2525 case INSN: 2526 case JUMP_INSN: 2527 case CALL_INSN: 2528 case NOTE: 2529 case LABEL_REF: 2530 case BARRIER: 2531 /* The chain of insns is not being copied. */ 2532 return; 2533 2534 default: 2535 break; 2536 } 2537 2538 RTX_FLAG (x, used) = 0; 2539 2540 format_ptr = GET_RTX_FORMAT (code); 2541 length = GET_RTX_LENGTH (code); 2542 2543 for (i = 0; i < length; i++) 2544 { 2545 switch (*format_ptr++) 2546 { 2547 case 'e': 2548 if (i == length-1) 2549 { 2550 x = XEXP (x, i); 2551 goto repeat; 2552 } 2553 reset_used_flags (XEXP (x, i)); 2554 break; 2555 2556 case 'E': 2557 for (j = 0; j < XVECLEN (x, i); j++) 2558 reset_used_flags (XVECEXP (x, i, j)); 2559 break; 2560 } 2561 } 2562} 2563 2564/* Set all the USED bits in X to allow copy_rtx_if_shared to be used 2565 to look for shared sub-parts. */ 2566 2567void 2568set_used_flags (rtx x) 2569{ 2570 int i, j; 2571 enum rtx_code code; 2572 const char *format_ptr; 2573 2574 if (x == 0) 2575 return; 2576 2577 code = GET_CODE (x); 2578 2579 /* These types may be freely shared so we needn't do any resetting 2580 for them. */ 2581 2582 switch (code) 2583 { 2584 case REG:
|
2774 case QUEUED:
|
2585 case CONST_INT: 2586 case CONST_DOUBLE: 2587 case CONST_VECTOR: 2588 case SYMBOL_REF: 2589 case CODE_LABEL: 2590 case PC: 2591 case CC0: 2592 return; 2593 2594 case INSN: 2595 case JUMP_INSN: 2596 case CALL_INSN: 2597 case NOTE: 2598 case LABEL_REF: 2599 case BARRIER: 2600 /* The chain of insns is not being copied. */ 2601 return; 2602 2603 default: 2604 break; 2605 } 2606 2607 RTX_FLAG (x, used) = 1; 2608 2609 format_ptr = GET_RTX_FORMAT (code); 2610 for (i = 0; i < GET_RTX_LENGTH (code); i++) 2611 { 2612 switch (*format_ptr++) 2613 { 2614 case 'e': 2615 set_used_flags (XEXP (x, i)); 2616 break; 2617 2618 case 'E': 2619 for (j = 0; j < XVECLEN (x, i); j++) 2620 set_used_flags (XVECEXP (x, i, j)); 2621 break; 2622 } 2623 } 2624} 2625 2626/* Copy X if necessary so that it won't be altered by changes in OTHER. 2627 Return X or the rtx for the pseudo reg the value of X was copied into. 2628 OTHER must be valid as a SET_DEST. */ 2629 2630rtx 2631make_safe_from (rtx x, rtx other) 2632{ 2633 while (1) 2634 switch (GET_CODE (other)) 2635 { 2636 case SUBREG: 2637 other = SUBREG_REG (other); 2638 break; 2639 case STRICT_LOW_PART: 2640 case SIGN_EXTEND: 2641 case ZERO_EXTEND: 2642 other = XEXP (other, 0); 2643 break; 2644 default: 2645 goto done; 2646 } 2647 done:
|
2838 if ((GET_CODE (other) == MEM
|
2648 if ((MEM_P (other) |
2649 && ! CONSTANT_P (x)
|
2840 && GET_CODE (x) != REG
|
2650 && !REG_P (x) |
2651 && GET_CODE (x) != SUBREG)
|
2842 || (GET_CODE (other) == REG
|
2652 || (REG_P (other) |
2653 && (REGNO (other) < FIRST_PSEUDO_REGISTER 2654 || reg_mentioned_p (other, x)))) 2655 { 2656 rtx temp = gen_reg_rtx (GET_MODE (x)); 2657 emit_move_insn (temp, x); 2658 return temp; 2659 } 2660 return x; 2661} 2662 2663/* Emission of insns (adding them to the doubly-linked list). */ 2664 2665/* Return the first insn of the current sequence or current function. */ 2666 2667rtx 2668get_insns (void) 2669{ 2670 return first_insn; 2671} 2672 2673/* Specify a new insn as the first in the chain. */ 2674 2675void 2676set_first_insn (rtx insn) 2677{
|
2868 if (PREV_INSN (insn) != 0)
2869 abort ();
|
2678 gcc_assert (!PREV_INSN (insn)); |
2679 first_insn = insn; 2680} 2681 2682/* Return the last insn emitted in current sequence or current function. */ 2683 2684rtx 2685get_last_insn (void) 2686{ 2687 return last_insn; 2688} 2689 2690/* Specify a new insn as the last in the chain. */ 2691 2692void 2693set_last_insn (rtx insn) 2694{
|
2886 if (NEXT_INSN (insn) != 0)
2887 abort ();
|
2695 gcc_assert (!NEXT_INSN (insn)); |
2696 last_insn = insn; 2697} 2698 2699/* Return the last insn emitted, even if it is in a sequence now pushed. */ 2700 2701rtx 2702get_last_insn_anywhere (void) 2703{ 2704 struct sequence_stack *stack; 2705 if (last_insn) 2706 return last_insn; 2707 for (stack = seq_stack; stack; stack = stack->next) 2708 if (stack->last != 0) 2709 return stack->last; 2710 return 0; 2711} 2712 2713/* Return the first nonnote insn emitted in current sequence or current 2714 function. This routine looks inside SEQUENCEs. */ 2715 2716rtx 2717get_first_nonnote_insn (void) 2718{ 2719 rtx insn = first_insn; 2720 2721 if (insn) 2722 { 2723 if (NOTE_P (insn)) 2724 for (insn = next_insn (insn); 2725 insn && NOTE_P (insn); 2726 insn = next_insn (insn)) 2727 continue; 2728 else 2729 {
|
2922 if (GET_CODE (insn) == INSN
|
2730 if (NONJUMP_INSN_P (insn) |
2731 && GET_CODE (PATTERN (insn)) == SEQUENCE) 2732 insn = XVECEXP (PATTERN (insn), 0, 0); 2733 } 2734 } 2735 2736 return insn; 2737} 2738 2739/* Return the last nonnote insn emitted in current sequence or current 2740 function. This routine looks inside SEQUENCEs. */ 2741 2742rtx 2743get_last_nonnote_insn (void) 2744{ 2745 rtx insn = last_insn; 2746 2747 if (insn) 2748 { 2749 if (NOTE_P (insn)) 2750 for (insn = previous_insn (insn); 2751 insn && NOTE_P (insn); 2752 insn = previous_insn (insn)) 2753 continue; 2754 else 2755 {
|
2948 if (GET_CODE (insn) == INSN
|
2756 if (NONJUMP_INSN_P (insn) |
2757 && GET_CODE (PATTERN (insn)) == SEQUENCE) 2758 insn = XVECEXP (PATTERN (insn), 0, 2759 XVECLEN (PATTERN (insn), 0) - 1); 2760 } 2761 } 2762 2763 return insn; 2764} 2765 2766/* Return a number larger than any instruction's uid in this function. */ 2767 2768int 2769get_max_uid (void) 2770{ 2771 return cur_insn_uid; 2772} 2773 2774/* Renumber instructions so that no instruction UIDs are wasted. */ 2775 2776void
|
2969renumber_insns (FILE *stream)
|
2777renumber_insns (void) |
2778{ 2779 rtx insn; 2780 2781 /* If we're not supposed to renumber instructions, don't. */ 2782 if (!flag_renumber_insns) 2783 return; 2784 2785 /* If there aren't that many instructions, then it's not really 2786 worth renumbering them. */ 2787 if (flag_renumber_insns == 1 && get_max_uid () < 25000) 2788 return; 2789 2790 cur_insn_uid = 1; 2791 2792 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 2793 {
|
2986 if (stream)
2987 fprintf (stream, "Renumbering insn %d to %d\n",
|
2794 if (dump_file) 2795 fprintf (dump_file, "Renumbering insn %d to %d\n", |
2796 INSN_UID (insn), cur_insn_uid); 2797 INSN_UID (insn) = cur_insn_uid++; 2798 } 2799} 2800 2801/* Return the next insn. If it is a SEQUENCE, return the first insn 2802 of the sequence. */ 2803 2804rtx 2805next_insn (rtx insn) 2806{ 2807 if (insn) 2808 { 2809 insn = NEXT_INSN (insn);
|
3002 if (insn && GET_CODE (insn) == INSN
|
2810 if (insn && NONJUMP_INSN_P (insn) |
2811 && GET_CODE (PATTERN (insn)) == SEQUENCE) 2812 insn = XVECEXP (PATTERN (insn), 0, 0); 2813 } 2814 2815 return insn; 2816} 2817 2818/* Return the previous insn. If it is a SEQUENCE, return the last insn 2819 of the sequence. */ 2820 2821rtx 2822previous_insn (rtx insn) 2823{ 2824 if (insn) 2825 { 2826 insn = PREV_INSN (insn);
|
3019 if (insn && GET_CODE (insn) == INSN
|
2827 if (insn && NONJUMP_INSN_P (insn) |
2828 && GET_CODE (PATTERN (insn)) == SEQUENCE) 2829 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1); 2830 } 2831 2832 return insn; 2833} 2834 2835/* Return the next insn after INSN that is not a NOTE. This routine does not 2836 look inside SEQUENCEs. */ 2837 2838rtx 2839next_nonnote_insn (rtx insn) 2840{ 2841 while (insn) 2842 { 2843 insn = NEXT_INSN (insn);
|
3036 if (insn == 0 || GET_CODE (insn) != NOTE)
|
2844 if (insn == 0 || !NOTE_P (insn)) |
2845 break; 2846 } 2847 2848 return insn; 2849} 2850 2851/* Return the previous insn before INSN that is not a NOTE. This routine does 2852 not look inside SEQUENCEs. */ 2853 2854rtx 2855prev_nonnote_insn (rtx insn) 2856{ 2857 while (insn) 2858 { 2859 insn = PREV_INSN (insn);
|
3052 if (insn == 0 || GET_CODE (insn) != NOTE)
|
2860 if (insn == 0 || !NOTE_P (insn)) |
2861 break; 2862 } 2863 2864 return insn; 2865} 2866 2867/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN; 2868 or 0, if there is none. This routine does not look inside 2869 SEQUENCEs. */ 2870 2871rtx 2872next_real_insn (rtx insn) 2873{ 2874 while (insn) 2875 { 2876 insn = NEXT_INSN (insn);
|
3069 if (insn == 0 || GET_CODE (insn) == INSN
3070 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
|
2877 if (insn == 0 || INSN_P (insn)) |
2878 break; 2879 } 2880 2881 return insn; 2882} 2883 2884/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN; 2885 or 0, if there is none. This routine does not look inside 2886 SEQUENCEs. */ 2887 2888rtx 2889prev_real_insn (rtx insn) 2890{ 2891 while (insn) 2892 { 2893 insn = PREV_INSN (insn);
|
3087 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3088 || GET_CODE (insn) == JUMP_INSN)
|
2894 if (insn == 0 || INSN_P (insn)) |
2895 break; 2896 } 2897 2898 return insn; 2899} 2900 2901/* Return the last CALL_INSN in the current list, or 0 if there is none. 2902 This routine does not look inside SEQUENCEs. */ 2903 2904rtx 2905last_call_insn (void) 2906{ 2907 rtx insn; 2908 2909 for (insn = get_last_insn ();
|
3104 insn && GET_CODE (insn) != CALL_INSN;
|
2910 insn && !CALL_P (insn); |
2911 insn = PREV_INSN (insn)) 2912 ; 2913 2914 return insn; 2915} 2916 2917/* Find the next insn after INSN that really does something. This routine 2918 does not look inside SEQUENCEs. Until reload has completed, this is the 2919 same as next_real_insn. */ 2920 2921int 2922active_insn_p (rtx insn) 2923{
|
3118 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3119 || (GET_CODE (insn) == INSN
|
2924 return (CALL_P (insn) || JUMP_P (insn) 2925 || (NONJUMP_INSN_P (insn) |
2926 && (! reload_completed 2927 || (GET_CODE (PATTERN (insn)) != USE 2928 && GET_CODE (PATTERN (insn)) != CLOBBER)))); 2929} 2930 2931rtx 2932next_active_insn (rtx insn) 2933{ 2934 while (insn) 2935 { 2936 insn = NEXT_INSN (insn); 2937 if (insn == 0 || active_insn_p (insn)) 2938 break; 2939 } 2940 2941 return insn; 2942} 2943 2944/* Find the last insn before INSN that really does something. This routine 2945 does not look inside SEQUENCEs. Until reload has completed, this is the 2946 same as prev_real_insn. */ 2947 2948rtx 2949prev_active_insn (rtx insn) 2950{ 2951 while (insn) 2952 { 2953 insn = PREV_INSN (insn); 2954 if (insn == 0 || active_insn_p (insn)) 2955 break; 2956 } 2957 2958 return insn; 2959} 2960 2961/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */ 2962 2963rtx 2964next_label (rtx insn) 2965{ 2966 while (insn) 2967 { 2968 insn = NEXT_INSN (insn);
|
3163 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
|
2969 if (insn == 0 || LABEL_P (insn)) |
2970 break; 2971 } 2972 2973 return insn; 2974} 2975 2976/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */ 2977 2978rtx 2979prev_label (rtx insn) 2980{ 2981 while (insn) 2982 { 2983 insn = PREV_INSN (insn);
|
3178 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
|
2984 if (insn == 0 || LABEL_P (insn)) |
2985 break; 2986 } 2987 2988 return insn; 2989}
|
2990 2991/* Return the last label to mark the same position as LABEL. Return null 2992 if LABEL itself is null. */ 2993 2994rtx 2995skip_consecutive_labels (rtx label) 2996{ 2997 rtx insn; 2998 2999 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn)) 3000 if (LABEL_P (insn)) 3001 label = insn; 3002 3003 return label; 3004} |
3005 3006#ifdef HAVE_cc0 3007/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER 3008 and REG_CC_USER notes so we can find it. */ 3009 3010void 3011link_cc0_insns (rtx insn) 3012{ 3013 rtx user = next_nonnote_insn (insn); 3014
|
3194 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
|
3015 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE) |
3016 user = XVECEXP (PATTERN (user), 0, 0); 3017 3018 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn, 3019 REG_NOTES (user)); 3020 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn)); 3021} 3022 3023/* Return the next insn that uses CC0 after INSN, which is assumed to 3024 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter 3025 applied to the result of this function should yield INSN). 3026 3027 Normally, this is simply the next insn. However, if a REG_CC_USER note 3028 is present, it contains the insn that uses CC0. 3029 3030 Return 0 if we can't find the insn. */ 3031 3032rtx 3033next_cc0_user (rtx insn) 3034{ 3035 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX); 3036 3037 if (note) 3038 return XEXP (note, 0); 3039 3040 insn = next_nonnote_insn (insn);
|
3220 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
|
3041 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
3042 insn = XVECEXP (PATTERN (insn), 0, 0); 3043 3044 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn))) 3045 return insn; 3046 3047 return 0; 3048} 3049 3050/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER 3051 note, it is the previous insn. */ 3052 3053rtx 3054prev_cc0_setter (rtx insn) 3055{ 3056 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); 3057 3058 if (note) 3059 return XEXP (note, 0); 3060 3061 insn = prev_nonnote_insn (insn);
|
3241 if (! sets_cc0_p (PATTERN (insn)))
3242 abort ();
|
3062 gcc_assert (sets_cc0_p (PATTERN (insn))); |
3063 3064 return insn; 3065} 3066#endif 3067 3068/* Increment the label uses for all labels present in rtx. */ 3069 3070static void 3071mark_label_nuses (rtx x) 3072{ 3073 enum rtx_code code; 3074 int i, j; 3075 const char *fmt; 3076 3077 code = GET_CODE (x); 3078 if (code == LABEL_REF && LABEL_P (XEXP (x, 0))) 3079 LABEL_NUSES (XEXP (x, 0))++; 3080 3081 fmt = GET_RTX_FORMAT (code); 3082 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 3083 { 3084 if (fmt[i] == 'e') 3085 mark_label_nuses (XEXP (x, i)); 3086 else if (fmt[i] == 'E') 3087 for (j = XVECLEN (x, i) - 1; j >= 0; j--) 3088 mark_label_nuses (XVECEXP (x, i, j)); 3089 } 3090} 3091 3092 3093/* Try splitting insns that can be split for better scheduling. 3094 PAT is the pattern which might split. 3095 TRIAL is the insn providing PAT. 3096 LAST is nonzero if we should return the last insn of the sequence produced. 3097 3098 If this routine succeeds in splitting, it returns the first or last 3099 replacement insn depending on the value of LAST. Otherwise, it 3100 returns TRIAL. If the insn to be returned can be split, it will be. */ 3101 3102rtx 3103try_split (rtx pat, rtx trial, int last) 3104{ 3105 rtx before = PREV_INSN (trial); 3106 rtx after = NEXT_INSN (trial); 3107 int has_barrier = 0; 3108 rtx tem; 3109 rtx note, seq; 3110 int probability; 3111 rtx insn_last, insn; 3112 int njumps = 0; 3113 3114 if (any_condjump_p (trial) 3115 && (note = find_reg_note (trial, REG_BR_PROB, 0))) 3116 split_branch_probability = INTVAL (XEXP (note, 0)); 3117 probability = split_branch_probability; 3118 3119 seq = split_insns (pat, trial); 3120 3121 split_branch_probability = -1; 3122 3123 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER. 3124 We may need to handle this specially. */
|
3305 if (after && GET_CODE (after) == BARRIER)
|
3125 if (after && BARRIER_P (after)) |
3126 { 3127 has_barrier = 1; 3128 after = NEXT_INSN (after); 3129 } 3130 3131 if (!seq) 3132 return trial; 3133 3134 /* Avoid infinite loop if any insn of the result matches 3135 the original pattern. */ 3136 insn_last = seq; 3137 while (1) 3138 { 3139 if (INSN_P (insn_last) 3140 && rtx_equal_p (PATTERN (insn_last), pat)) 3141 return trial; 3142 if (!NEXT_INSN (insn_last)) 3143 break; 3144 insn_last = NEXT_INSN (insn_last); 3145 } 3146 3147 /* Mark labels. */ 3148 for (insn = insn_last; insn ; insn = PREV_INSN (insn)) 3149 {
|
3330 if (GET_CODE (insn) == JUMP_INSN)
|
3150 if (JUMP_P (insn)) |
3151 { 3152 mark_jump_label (PATTERN (insn), insn, 0); 3153 njumps++; 3154 if (probability != -1 3155 && any_condjump_p (insn) 3156 && !find_reg_note (insn, REG_BR_PROB, 0)) 3157 { 3158 /* We can preserve the REG_BR_PROB notes only if exactly 3159 one jump is created, otherwise the machine description 3160 is responsible for this step using 3161 split_branch_probability variable. */
|
3342 if (njumps != 1)
3343 abort ();
|
3162 gcc_assert (njumps == 1); |
3163 REG_NOTES (insn) 3164 = gen_rtx_EXPR_LIST (REG_BR_PROB, 3165 GEN_INT (probability), 3166 REG_NOTES (insn)); 3167 } 3168 } 3169 } 3170 3171 /* If we are splitting a CALL_INSN, look for the CALL_INSN 3172 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
|
3354 if (GET_CODE (trial) == CALL_INSN)
|
3173 if (CALL_P (trial)) |
3174 { 3175 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
|
3357 if (GET_CODE (insn) == CALL_INSN)
|
3176 if (CALL_P (insn)) |
3177 { 3178 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn); 3179 while (*p) 3180 p = &XEXP (*p, 1); 3181 *p = CALL_INSN_FUNCTION_USAGE (trial); 3182 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial); 3183 } 3184 } 3185 3186 /* Copy notes, particularly those related to the CFG. */ 3187 for (note = REG_NOTES (trial); note; note = XEXP (note, 1)) 3188 { 3189 switch (REG_NOTE_KIND (note)) 3190 { 3191 case REG_EH_REGION: 3192 insn = insn_last; 3193 while (insn != NULL_RTX) 3194 {
|
3376 if (GET_CODE (insn) == CALL_INSN
3377 || (flag_non_call_exceptions
|
3195 if (CALL_P (insn) 3196 || (flag_non_call_exceptions && INSN_P (insn) |
3197 && may_trap_p (PATTERN (insn)))) 3198 REG_NOTES (insn) 3199 = gen_rtx_EXPR_LIST (REG_EH_REGION, 3200 XEXP (note, 0), 3201 REG_NOTES (insn)); 3202 insn = PREV_INSN (insn); 3203 } 3204 break; 3205 3206 case REG_NORETURN: 3207 case REG_SETJMP:
|
3389 case REG_ALWAYS_RETURN:
|
3208 insn = insn_last; 3209 while (insn != NULL_RTX) 3210 {
|
3393 if (GET_CODE (insn) == CALL_INSN)
|
3211 if (CALL_P (insn)) |
3212 REG_NOTES (insn) 3213 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note), 3214 XEXP (note, 0), 3215 REG_NOTES (insn)); 3216 insn = PREV_INSN (insn); 3217 } 3218 break; 3219 3220 case REG_NON_LOCAL_GOTO: 3221 insn = insn_last; 3222 while (insn != NULL_RTX) 3223 {
|
3406 if (GET_CODE (insn) == JUMP_INSN)
|
3224 if (JUMP_P (insn)) |
3225 REG_NOTES (insn) 3226 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note), 3227 XEXP (note, 0), 3228 REG_NOTES (insn)); 3229 insn = PREV_INSN (insn); 3230 } 3231 break; 3232 3233 default: 3234 break; 3235 } 3236 } 3237 3238 /* If there are LABELS inside the split insns increment the 3239 usage count so we don't delete the label. */
|
3422 if (GET_CODE (trial) == INSN)
|
3240 if (NONJUMP_INSN_P (trial)) |
3241 { 3242 insn = insn_last; 3243 while (insn != NULL_RTX) 3244 {
|
3427 if (GET_CODE (insn) == INSN)
|
3245 if (NONJUMP_INSN_P (insn)) |
3246 mark_label_nuses (PATTERN (insn)); 3247 3248 insn = PREV_INSN (insn); 3249 } 3250 } 3251 3252 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial)); 3253 3254 delete_insn (trial); 3255 if (has_barrier) 3256 emit_barrier_after (tem); 3257 3258 /* Recursively call try_split for each new insn created; by the 3259 time control returns here that insn will be fully split, so 3260 set LAST and continue from the insn after the one returned. 3261 We can't use next_active_insn here since AFTER may be a note. 3262 Ignore deleted insns, which can be occur if not optimizing. */ 3263 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem)) 3264 if (! INSN_DELETED_P (tem) && INSN_P (tem)) 3265 tem = try_split (PATTERN (tem), tem, 1); 3266 3267 /* Return either the first or the last insn, depending on which was 3268 requested. */ 3269 return last 3270 ? (after ? PREV_INSN (after) : last_insn) 3271 : NEXT_INSN (before); 3272} 3273 3274/* Make and return an INSN rtx, initializing all its slots. 3275 Store PATTERN in the pattern slots. */ 3276 3277rtx 3278make_insn_raw (rtx pattern) 3279{ 3280 rtx insn; 3281 3282 insn = rtx_alloc (INSN); 3283 3284 INSN_UID (insn) = cur_insn_uid++; 3285 PATTERN (insn) = pattern; 3286 INSN_CODE (insn) = -1; 3287 LOG_LINKS (insn) = NULL; 3288 REG_NOTES (insn) = NULL; 3289 INSN_LOCATOR (insn) = 0; 3290 BLOCK_FOR_INSN (insn) = NULL; 3291 3292#ifdef ENABLE_RTL_CHECKING 3293 if (insn 3294 && INSN_P (insn) 3295 && (returnjump_p (insn) 3296 || (GET_CODE (insn) == SET 3297 && SET_DEST (insn) == pc_rtx))) 3298 {
|
3481 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
|
3299 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n"); |
3300 debug_rtx (insn); 3301 } 3302#endif 3303 3304 return insn; 3305} 3306 3307/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */ 3308
|
3491static rtx
|
3309rtx |
3310make_jump_insn_raw (rtx pattern) 3311{ 3312 rtx insn; 3313 3314 insn = rtx_alloc (JUMP_INSN); 3315 INSN_UID (insn) = cur_insn_uid++; 3316 3317 PATTERN (insn) = pattern; 3318 INSN_CODE (insn) = -1; 3319 LOG_LINKS (insn) = NULL; 3320 REG_NOTES (insn) = NULL; 3321 JUMP_LABEL (insn) = NULL; 3322 INSN_LOCATOR (insn) = 0; 3323 BLOCK_FOR_INSN (insn) = NULL; 3324 3325 return insn; 3326} 3327 3328/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */ 3329 3330static rtx 3331make_call_insn_raw (rtx pattern) 3332{ 3333 rtx insn; 3334 3335 insn = rtx_alloc (CALL_INSN); 3336 INSN_UID (insn) = cur_insn_uid++; 3337 3338 PATTERN (insn) = pattern; 3339 INSN_CODE (insn) = -1; 3340 LOG_LINKS (insn) = NULL; 3341 REG_NOTES (insn) = NULL; 3342 CALL_INSN_FUNCTION_USAGE (insn) = NULL; 3343 INSN_LOCATOR (insn) = 0; 3344 BLOCK_FOR_INSN (insn) = NULL; 3345 3346 return insn; 3347} 3348 3349/* Add INSN to the end of the doubly-linked list. 3350 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */ 3351 3352void 3353add_insn (rtx insn) 3354{ 3355 PREV_INSN (insn) = last_insn; 3356 NEXT_INSN (insn) = 0; 3357 3358 if (NULL != last_insn) 3359 NEXT_INSN (last_insn) = insn; 3360 3361 if (NULL == first_insn) 3362 first_insn = insn; 3363 3364 last_insn = insn; 3365} 3366 3367/* Add INSN into the doubly-linked list after insn AFTER. This and 3368 the next should be the only functions called to insert an insn once 3369 delay slots have been filled since only they know how to update a 3370 SEQUENCE. */ 3371 3372void 3373add_insn_after (rtx insn, rtx after) 3374{ 3375 rtx next = NEXT_INSN (after); 3376 basic_block bb; 3377
|
3560 if (optimize && INSN_DELETED_P (after))
3561 abort ();
|
3378 gcc_assert (!optimize || !INSN_DELETED_P (after)); |
3379 3380 NEXT_INSN (insn) = next; 3381 PREV_INSN (insn) = after; 3382 3383 if (next) 3384 { 3385 PREV_INSN (next) = insn;
|
3569 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
|
3386 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
3387 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn; 3388 } 3389 else if (last_insn == after) 3390 last_insn = insn; 3391 else 3392 { 3393 struct sequence_stack *stack = seq_stack; 3394 /* Scan all pending sequences too. */ 3395 for (; stack; stack = stack->next) 3396 if (after == stack->last) 3397 { 3398 stack->last = insn; 3399 break; 3400 } 3401
|
3585 if (stack == 0)
3586 abort ();
|
3402 gcc_assert (stack); |
3403 } 3404
|
3589 if (GET_CODE (after) != BARRIER
3590 && GET_CODE (insn) != BARRIER
|
3405 if (!BARRIER_P (after) 3406 && !BARRIER_P (insn) |
3407 && (bb = BLOCK_FOR_INSN (after))) 3408 { 3409 set_block_for_insn (insn, bb); 3410 if (INSN_P (insn)) 3411 bb->flags |= BB_DIRTY; 3412 /* Should not happen as first in the BB is always 3413 either NOTE or LABEL. */ 3414 if (BB_END (bb) == after 3415 /* Avoid clobbering of structure when creating new BB. */
|
3600 && GET_CODE (insn) != BARRIER
3601 && (GET_CODE (insn) != NOTE
|
3416 && !BARRIER_P (insn) 3417 && (!NOTE_P (insn) |
3418 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)) 3419 BB_END (bb) = insn; 3420 } 3421 3422 NEXT_INSN (after) = insn;
|
3607 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
|
3423 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE) |
3424 { 3425 rtx sequence = PATTERN (after); 3426 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn; 3427 } 3428} 3429 3430/* Add INSN into the doubly-linked list before insn BEFORE. This and 3431 the previous should be the only functions called to insert an insn once 3432 delay slots have been filled since only they know how to update a 3433 SEQUENCE. */ 3434 3435void 3436add_insn_before (rtx insn, rtx before) 3437{ 3438 rtx prev = PREV_INSN (before); 3439 basic_block bb; 3440
|
3625 if (optimize && INSN_DELETED_P (before))
3626 abort ();
|
3441 gcc_assert (!optimize || !INSN_DELETED_P (before)); |
3442 3443 PREV_INSN (insn) = prev; 3444 NEXT_INSN (insn) = before; 3445 3446 if (prev) 3447 { 3448 NEXT_INSN (prev) = insn;
|
3634 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
|
3449 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
3450 { 3451 rtx sequence = PATTERN (prev); 3452 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn; 3453 } 3454 } 3455 else if (first_insn == before) 3456 first_insn = insn; 3457 else 3458 { 3459 struct sequence_stack *stack = seq_stack; 3460 /* Scan all pending sequences too. */ 3461 for (; stack; stack = stack->next) 3462 if (before == stack->first) 3463 { 3464 stack->first = insn; 3465 break; 3466 } 3467
|
3653 if (stack == 0)
3654 abort ();
|
3468 gcc_assert (stack); |
3469 } 3470
|
3657 if (GET_CODE (before) != BARRIER
3658 && GET_CODE (insn) != BARRIER
|
3471 if (!BARRIER_P (before) 3472 && !BARRIER_P (insn) |
3473 && (bb = BLOCK_FOR_INSN (before))) 3474 { 3475 set_block_for_insn (insn, bb); 3476 if (INSN_P (insn)) 3477 bb->flags |= BB_DIRTY;
|
3664 /* Should not happen as first in the BB is always
3665 either NOTE or LABEl. */
3666 if (BB_HEAD (bb) == insn
3667 /* Avoid clobbering of structure when creating new BB. */
3668 && GET_CODE (insn) != BARRIER
3669 && (GET_CODE (insn) != NOTE
3670 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3671 abort ();
|
3478 /* Should not happen as first in the BB is always either NOTE or 3479 LABEL. */ 3480 gcc_assert (BB_HEAD (bb) != insn 3481 /* Avoid clobbering of structure when creating new BB. */ 3482 || BARRIER_P (insn) 3483 || (NOTE_P (insn) 3484 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)); |
3485 } 3486 3487 PREV_INSN (before) = insn;
|
3675 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
|
3488 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE) |
3489 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn; 3490} 3491 3492/* Remove an insn from its doubly-linked list. This function knows how 3493 to handle sequences. */ 3494void 3495remove_insn (rtx insn) 3496{ 3497 rtx next = NEXT_INSN (insn); 3498 rtx prev = PREV_INSN (insn); 3499 basic_block bb; 3500 3501 if (prev) 3502 { 3503 NEXT_INSN (prev) = next;
|
3691 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
|
3504 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
3505 { 3506 rtx sequence = PATTERN (prev); 3507 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next; 3508 } 3509 } 3510 else if (first_insn == insn) 3511 first_insn = next; 3512 else 3513 { 3514 struct sequence_stack *stack = seq_stack; 3515 /* Scan all pending sequences too. */ 3516 for (; stack; stack = stack->next) 3517 if (insn == stack->first) 3518 { 3519 stack->first = next; 3520 break; 3521 } 3522
|
3710 if (stack == 0)
3711 abort ();
|
3523 gcc_assert (stack); |
3524 } 3525 3526 if (next) 3527 { 3528 PREV_INSN (next) = prev;
|
3717 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
|
3529 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
3530 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev; 3531 } 3532 else if (last_insn == insn) 3533 last_insn = prev; 3534 else 3535 { 3536 struct sequence_stack *stack = seq_stack; 3537 /* Scan all pending sequences too. */ 3538 for (; stack; stack = stack->next) 3539 if (insn == stack->last) 3540 { 3541 stack->last = prev; 3542 break; 3543 } 3544
|
3733 if (stack == 0)
3734 abort ();
|
3545 gcc_assert (stack); |
3546 }
|
3736 if (GET_CODE (insn) != BARRIER
|
3547 if (!BARRIER_P (insn) |
3548 && (bb = BLOCK_FOR_INSN (insn))) 3549 { 3550 if (INSN_P (insn)) 3551 bb->flags |= BB_DIRTY; 3552 if (BB_HEAD (bb) == insn) 3553 { 3554 /* Never ever delete the basic block note without deleting whole 3555 basic block. */
|
3745 if (GET_CODE (insn) == NOTE)
3746 abort ();
|
3556 gcc_assert (!NOTE_P (insn)); |
3557 BB_HEAD (bb) = next; 3558 } 3559 if (BB_END (bb) == insn) 3560 BB_END (bb) = prev; 3561 } 3562} 3563 3564/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */ 3565 3566void 3567add_function_usage_to (rtx call_insn, rtx call_fusage) 3568{
|
3759 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3760 abort ();
|
3569 gcc_assert (call_insn && CALL_P (call_insn)); |
3570 3571 /* Put the register usage information on the CALL. If there is already 3572 some usage information, put ours at the end. */ 3573 if (CALL_INSN_FUNCTION_USAGE (call_insn)) 3574 { 3575 rtx link; 3576 3577 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; 3578 link = XEXP (link, 1)) 3579 ; 3580 3581 XEXP (link, 1) = call_fusage; 3582 } 3583 else 3584 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; 3585} 3586 3587/* Delete all insns made since FROM. 3588 FROM becomes the new last instruction. */ 3589 3590void 3591delete_insns_since (rtx from) 3592{ 3593 if (from == 0) 3594 first_insn = 0; 3595 else 3596 NEXT_INSN (from) = 0; 3597 last_insn = from; 3598} 3599 3600/* This function is deprecated, please use sequences instead. 3601 3602 Move a consecutive bunch of insns to a different place in the chain. 3603 The insns to be moved are those between FROM and TO. 3604 They are moved to a new position after the insn AFTER. 3605 AFTER must not be FROM or TO or any insn in between. 3606 3607 This function does not know about SEQUENCEs and hence should not be 3608 called after delay-slot filling has been done. */ 3609 3610void 3611reorder_insns_nobb (rtx from, rtx to, rtx after) 3612{ 3613 /* Splice this bunch out of where it is now. */ 3614 if (PREV_INSN (from)) 3615 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to); 3616 if (NEXT_INSN (to)) 3617 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from); 3618 if (last_insn == to) 3619 last_insn = PREV_INSN (from); 3620 if (first_insn == from) 3621 first_insn = NEXT_INSN (to); 3622 3623 /* Make the new neighbors point to it and it to them. */ 3624 if (NEXT_INSN (after)) 3625 PREV_INSN (NEXT_INSN (after)) = to; 3626 3627 NEXT_INSN (to) = NEXT_INSN (after); 3628 PREV_INSN (from) = after; 3629 NEXT_INSN (after) = from; 3630 if (after == last_insn) 3631 last_insn = to; 3632} 3633 3634/* Same as function above, but take care to update BB boundaries. 
*/ 3635void 3636reorder_insns (rtx from, rtx to, rtx after) 3637{ 3638 rtx prev = PREV_INSN (from); 3639 basic_block bb, bb2; 3640 3641 reorder_insns_nobb (from, to, after); 3642
|
3834 if (GET_CODE (after) != BARRIER
|
3643 if (!BARRIER_P (after) |
3644 && (bb = BLOCK_FOR_INSN (after))) 3645 { 3646 rtx x; 3647 bb->flags |= BB_DIRTY; 3648
|
3840 if (GET_CODE (from) != BARRIER
|
3649 if (!BARRIER_P (from) |
3650 && (bb2 = BLOCK_FOR_INSN (from))) 3651 { 3652 if (BB_END (bb2) == to) 3653 BB_END (bb2) = prev; 3654 bb2->flags |= BB_DIRTY; 3655 } 3656 3657 if (BB_END (bb) == after) 3658 BB_END (bb) = to; 3659 3660 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
|
3852 set_block_for_insn (x, bb);
|
3661 if (!BARRIER_P (x)) 3662 set_block_for_insn (x, bb); |
3663 } 3664} 3665 3666/* Return the line note insn preceding INSN. */ 3667 3668static rtx 3669find_line_note (rtx insn) 3670{ 3671 if (no_line_numbers) 3672 return 0; 3673 3674 for (; insn; insn = PREV_INSN (insn))
|
3865 if (GET_CODE (insn) == NOTE
|
3675 if (NOTE_P (insn) |
3676 && NOTE_LINE_NUMBER (insn) >= 0) 3677 break; 3678 3679 return insn; 3680} 3681
|
3872/* Like reorder_insns, but inserts line notes to preserve the line numbers
3873 of the moved insns when debugging. This may insert a note between AFTER
3874 and FROM, and another one after TO. */
3875
3876void
3877reorder_insns_with_line_notes (rtx from, rtx to, rtx after)
3878{
3879 rtx from_line = find_line_note (from);
3880 rtx after_line = find_line_note (after);
3881
3882 reorder_insns (from, to, after);
3883
3884 if (from_line == after_line)
3885 return;
3886
3887 if (from_line)
3888 emit_note_copy_after (from_line, after);
3889 if (after_line)
3890 emit_note_copy_after (after_line, to);
3891}
3892
3893/* Remove unnecessary notes from the instruction stream. */
3894
3895void
3896remove_unnecessary_notes (void)
3897{
3898 rtx block_stack = NULL_RTX;
3899 rtx eh_stack = NULL_RTX;
3900 rtx insn;
3901 rtx next;
3902 rtx tmp;
3903
3904 /* We must not remove the first instruction in the function because
3905 the compiler depends on the first instruction being a note. */
3906 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3907 {
3908 /* Remember what's next. */
3909 next = NEXT_INSN (insn);
3910
3911 /* We're only interested in notes. */
3912 if (GET_CODE (insn) != NOTE)
3913 continue;
3914
3915 switch (NOTE_LINE_NUMBER (insn))
3916 {
3917 case NOTE_INSN_DELETED:
3918 case NOTE_INSN_LOOP_END_TOP_COND:
3919 remove_insn (insn);
3920 break;
3921
3922 case NOTE_INSN_EH_REGION_BEG:
3923 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3924 break;
3925
3926 case NOTE_INSN_EH_REGION_END:
3927 /* Too many end notes. */
3928 if (eh_stack == NULL_RTX)
3929 abort ();
3930 /* Mismatched nesting. */
3931 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3932 abort ();
3933 tmp = eh_stack;
3934 eh_stack = XEXP (eh_stack, 1);
3935 free_INSN_LIST_node (tmp);
3936 break;
3937
3938 case NOTE_INSN_BLOCK_BEG:
3939 /* By now, all notes indicating lexical blocks should have
3940 NOTE_BLOCK filled in. */
3941 if (NOTE_BLOCK (insn) == NULL_TREE)
3942 abort ();
3943 block_stack = alloc_INSN_LIST (insn, block_stack);
3944 break;
3945
3946 case NOTE_INSN_BLOCK_END:
3947 /* Too many end notes. */
3948 if (block_stack == NULL_RTX)
3949 abort ();
3950 /* Mismatched nesting. */
3951 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3952 abort ();
3953 tmp = block_stack;
3954 block_stack = XEXP (block_stack, 1);
3955 free_INSN_LIST_node (tmp);
3956
3957 /* Scan back to see if there are any non-note instructions
3958 between INSN and the beginning of this block. If not,
3959 then there is no PC range in the generated code that will
3960 actually be in this block, so there's no point in
3961 remembering the existence of the block. */
3962 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3963 {
3964 /* This block contains a real instruction. Note that we
3965 don't include labels; if the only thing in the block
3966 is a label, then there are still no PC values that
3967 lie within the block. */
3968 if (INSN_P (tmp))
3969 break;
3970
3971 /* We're only interested in NOTEs. */
3972 if (GET_CODE (tmp) != NOTE)
3973 continue;
3974
3975 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3976 {
3977 /* We just verified that this BLOCK matches us with
3978 the block_stack check above. Never delete the
3979 BLOCK for the outermost scope of the function; we
3980 can refer to names from that scope even if the
3981 block notes are messed up. */
3982 if (! is_body_block (NOTE_BLOCK (insn))
3983 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3984 {
3985 remove_insn (tmp);
3986 remove_insn (insn);
3987 }
3988 break;
3989 }
3990 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3991 /* There's a nested block. We need to leave the
3992 current block in place since otherwise the debugger
3993 wouldn't be able to show symbols from our block in
3994 the nested block. */
3995 break;
3996 }
3997 }
3998 }
3999
4000 /* Too many begin notes. */
4001 if (block_stack || eh_stack)
4002 abort ();
4003}
4004
|
3682 3683/* Emit insn(s) of given code and pattern 3684 at a specified place within the doubly-linked list. 3685 3686 All of the emit_foo global entry points accept an object 3687 X which is either an insn list or a PATTERN of a single 3688 instruction. 3689 3690 There are thus a few canonical ways to generate code and 3691 emit it at a specific place in the instruction stream. For 3692 example, consider the instruction named SPOT and the fact that 3693 we would like to emit some instructions before SPOT. We might 3694 do it like this: 3695 3696 start_sequence (); 3697 ... emit the new instructions ... 3698 insns_head = get_insns (); 3699 end_sequence (); 3700 3701 emit_insn_before (insns_head, SPOT); 3702 3703 It used to be common to generate SEQUENCE rtl instead, but that 3704 is a relic of the past which no longer occurs. The reason is that 3705 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE 3706 generated would almost certainly die right after it was created. */ 3707 3708/* Make X be output before the instruction BEFORE. */ 3709 3710rtx 3711emit_insn_before_noloc (rtx x, rtx before) 3712{ 3713 rtx last = before; 3714 rtx insn; 3715
|
4039#ifdef ENABLE_RTL_CHECKING
4040 if (before == NULL_RTX)
4041 abort ();
4042#endif
|
3716 gcc_assert (before); |
3717 3718 if (x == NULL_RTX) 3719 return last; 3720 3721 switch (GET_CODE (x)) 3722 { 3723 case INSN: 3724 case JUMP_INSN: 3725 case CALL_INSN: 3726 case CODE_LABEL: 3727 case BARRIER: 3728 case NOTE: 3729 insn = x; 3730 while (insn) 3731 { 3732 rtx next = NEXT_INSN (insn); 3733 add_insn_before (insn, before); 3734 last = insn; 3735 insn = next; 3736 } 3737 break; 3738 3739#ifdef ENABLE_RTL_CHECKING 3740 case SEQUENCE:
|
4067 abort ();
|
3741 gcc_unreachable (); |
3742 break; 3743#endif 3744 3745 default: 3746 last = make_insn_raw (x); 3747 add_insn_before (last, before); 3748 break; 3749 } 3750 3751 return last; 3752} 3753 3754/* Make an instruction with body X and code JUMP_INSN 3755 and output it before the instruction BEFORE. */ 3756 3757rtx 3758emit_jump_insn_before_noloc (rtx x, rtx before) 3759{ 3760 rtx insn, last = NULL_RTX; 3761
|
4088#ifdef ENABLE_RTL_CHECKING
4089 if (before == NULL_RTX)
4090 abort ();
4091#endif
|
3762 gcc_assert (before); |
3763 3764 switch (GET_CODE (x)) 3765 { 3766 case INSN: 3767 case JUMP_INSN: 3768 case CALL_INSN: 3769 case CODE_LABEL: 3770 case BARRIER: 3771 case NOTE: 3772 insn = x; 3773 while (insn) 3774 { 3775 rtx next = NEXT_INSN (insn); 3776 add_insn_before (insn, before); 3777 last = insn; 3778 insn = next; 3779 } 3780 break; 3781 3782#ifdef ENABLE_RTL_CHECKING 3783 case SEQUENCE:
|
4113 abort ();
|
3784 gcc_unreachable (); |
3785 break; 3786#endif 3787 3788 default: 3789 last = make_jump_insn_raw (x); 3790 add_insn_before (last, before); 3791 break; 3792 } 3793 3794 return last; 3795} 3796 3797/* Make an instruction with body X and code CALL_INSN 3798 and output it before the instruction BEFORE. */ 3799 3800rtx 3801emit_call_insn_before_noloc (rtx x, rtx before) 3802{ 3803 rtx last = NULL_RTX, insn; 3804
|
4134#ifdef ENABLE_RTL_CHECKING
4135 if (before == NULL_RTX)
4136 abort ();
4137#endif
|
3805 gcc_assert (before); |
3806 3807 switch (GET_CODE (x)) 3808 { 3809 case INSN: 3810 case JUMP_INSN: 3811 case CALL_INSN: 3812 case CODE_LABEL: 3813 case BARRIER: 3814 case NOTE: 3815 insn = x; 3816 while (insn) 3817 { 3818 rtx next = NEXT_INSN (insn); 3819 add_insn_before (insn, before); 3820 last = insn; 3821 insn = next; 3822 } 3823 break; 3824 3825#ifdef ENABLE_RTL_CHECKING 3826 case SEQUENCE:
|
4159 abort ();
|
3827 gcc_unreachable (); |
3828 break; 3829#endif 3830 3831 default: 3832 last = make_call_insn_raw (x); 3833 add_insn_before (last, before); 3834 break; 3835 } 3836 3837 return last; 3838} 3839 3840/* Make an insn of code BARRIER 3841 and output it before the insn BEFORE. */ 3842 3843rtx 3844emit_barrier_before (rtx before) 3845{ 3846 rtx insn = rtx_alloc (BARRIER); 3847 3848 INSN_UID (insn) = cur_insn_uid++; 3849 3850 add_insn_before (insn, before); 3851 return insn; 3852} 3853 3854/* Emit the label LABEL before the insn BEFORE. */ 3855 3856rtx 3857emit_label_before (rtx label, rtx before) 3858{ 3859 /* This can be called twice for the same label as a result of the 3860 confusion that follows a syntax error! So make it harmless. */ 3861 if (INSN_UID (label) == 0) 3862 { 3863 INSN_UID (label) = cur_insn_uid++; 3864 add_insn_before (label, before); 3865 } 3866 3867 return label; 3868} 3869 3870/* Emit a note of subtype SUBTYPE before the insn BEFORE. */ 3871 3872rtx 3873emit_note_before (int subtype, rtx before) 3874{ 3875 rtx note = rtx_alloc (NOTE); 3876 INSN_UID (note) = cur_insn_uid++;
|
3877#ifndef USE_MAPPED_LOCATION |
3878 NOTE_SOURCE_FILE (note) = 0;
|
3879#endif |
3880 NOTE_LINE_NUMBER (note) = subtype; 3881 BLOCK_FOR_INSN (note) = NULL; 3882 3883 add_insn_before (note, before); 3884 return note; 3885} 3886 3887/* Helper for emit_insn_after, handles lists of instructions 3888 efficiently. */ 3889 3890static rtx emit_insn_after_1 (rtx, rtx); 3891 3892static rtx 3893emit_insn_after_1 (rtx first, rtx after) 3894{ 3895 rtx last; 3896 rtx after_after; 3897 basic_block bb; 3898
|
4229 if (GET_CODE (after) != BARRIER
|
3899 if (!BARRIER_P (after) |
3900 && (bb = BLOCK_FOR_INSN (after))) 3901 { 3902 bb->flags |= BB_DIRTY; 3903 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
|
4234 if (GET_CODE (last) != BARRIER)
|
3904 if (!BARRIER_P (last)) |
3905 set_block_for_insn (last, bb);
|
4236 if (GET_CODE (last) != BARRIER)
|
3906 if (!BARRIER_P (last)) |
3907 set_block_for_insn (last, bb); 3908 if (BB_END (bb) == after) 3909 BB_END (bb) = last; 3910 } 3911 else 3912 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) 3913 continue; 3914 3915 after_after = NEXT_INSN (after); 3916 3917 NEXT_INSN (after) = first; 3918 PREV_INSN (first) = after; 3919 NEXT_INSN (last) = after_after; 3920 if (after_after) 3921 PREV_INSN (after_after) = last; 3922 3923 if (after == last_insn) 3924 last_insn = last; 3925 return last; 3926} 3927 3928/* Make X be output after the insn AFTER. */ 3929 3930rtx 3931emit_insn_after_noloc (rtx x, rtx after) 3932{ 3933 rtx last = after; 3934
|
4265#ifdef ENABLE_RTL_CHECKING
4266 if (after == NULL_RTX)
4267 abort ();
4268#endif
|
3935 gcc_assert (after); |
3936 3937 if (x == NULL_RTX) 3938 return last; 3939 3940 switch (GET_CODE (x)) 3941 { 3942 case INSN: 3943 case JUMP_INSN: 3944 case CALL_INSN: 3945 case CODE_LABEL: 3946 case BARRIER: 3947 case NOTE: 3948 last = emit_insn_after_1 (x, after); 3949 break; 3950 3951#ifdef ENABLE_RTL_CHECKING 3952 case SEQUENCE:
|
4286 abort ();
|
3953 gcc_unreachable (); |
3954 break; 3955#endif 3956 3957 default: 3958 last = make_insn_raw (x); 3959 add_insn_after (last, after); 3960 break; 3961 } 3962 3963 return last; 3964} 3965 3966/* Similar to emit_insn_after, except that line notes are to be inserted so 3967 as to act as if this insn were at FROM. */ 3968 3969void 3970emit_insn_after_with_line_notes (rtx x, rtx after, rtx from) 3971{ 3972 rtx from_line = find_line_note (from); 3973 rtx after_line = find_line_note (after); 3974 rtx insn = emit_insn_after (x, after); 3975 3976 if (from_line) 3977 emit_note_copy_after (from_line, after); 3978 3979 if (after_line) 3980 emit_note_copy_after (after_line, insn); 3981} 3982 3983/* Make an insn of code JUMP_INSN with body X 3984 and output it after the insn AFTER. */ 3985 3986rtx 3987emit_jump_insn_after_noloc (rtx x, rtx after) 3988{ 3989 rtx last; 3990
|
4324#ifdef ENABLE_RTL_CHECKING
4325 if (after == NULL_RTX)
4326 abort ();
4327#endif
|
3991 gcc_assert (after); |
3992 3993 switch (GET_CODE (x)) 3994 { 3995 case INSN: 3996 case JUMP_INSN: 3997 case CALL_INSN: 3998 case CODE_LABEL: 3999 case BARRIER: 4000 case NOTE: 4001 last = emit_insn_after_1 (x, after); 4002 break; 4003 4004#ifdef ENABLE_RTL_CHECKING 4005 case SEQUENCE:
|
4342 abort ();
|
4006 gcc_unreachable (); |
4007 break; 4008#endif 4009 4010 default: 4011 last = make_jump_insn_raw (x); 4012 add_insn_after (last, after); 4013 break; 4014 } 4015 4016 return last; 4017} 4018 4019/* Make an instruction with body X and code CALL_INSN 4020 and output it after the instruction AFTER. */ 4021 4022rtx 4023emit_call_insn_after_noloc (rtx x, rtx after) 4024{ 4025 rtx last; 4026
|
4363#ifdef ENABLE_RTL_CHECKING
4364 if (after == NULL_RTX)
4365 abort ();
4366#endif
|
4027 gcc_assert (after); |
4028 4029 switch (GET_CODE (x)) 4030 { 4031 case INSN: 4032 case JUMP_INSN: 4033 case CALL_INSN: 4034 case CODE_LABEL: 4035 case BARRIER: 4036 case NOTE: 4037 last = emit_insn_after_1 (x, after); 4038 break; 4039 4040#ifdef ENABLE_RTL_CHECKING 4041 case SEQUENCE:
|
4381 abort ();
|
4042 gcc_unreachable (); |
4043 break; 4044#endif 4045 4046 default: 4047 last = make_call_insn_raw (x); 4048 add_insn_after (last, after); 4049 break; 4050 } 4051 4052 return last; 4053} 4054 4055/* Make an insn of code BARRIER 4056 and output it after the insn AFTER. */ 4057 4058rtx 4059emit_barrier_after (rtx after) 4060{ 4061 rtx insn = rtx_alloc (BARRIER); 4062 4063 INSN_UID (insn) = cur_insn_uid++; 4064 4065 add_insn_after (insn, after); 4066 return insn; 4067} 4068 4069/* Emit the label LABEL after the insn AFTER. */ 4070 4071rtx 4072emit_label_after (rtx label, rtx after) 4073{ 4074 /* This can be called twice for the same label 4075 as a result of the confusion that follows a syntax error! 4076 So make it harmless. */ 4077 if (INSN_UID (label) == 0) 4078 { 4079 INSN_UID (label) = cur_insn_uid++; 4080 add_insn_after (label, after); 4081 } 4082 4083 return label; 4084} 4085 4086/* Emit a note of subtype SUBTYPE after the insn AFTER. */ 4087 4088rtx 4089emit_note_after (int subtype, rtx after) 4090{ 4091 rtx note = rtx_alloc (NOTE); 4092 INSN_UID (note) = cur_insn_uid++;
|
4093#ifndef USE_MAPPED_LOCATION |
4094 NOTE_SOURCE_FILE (note) = 0;
|
4095#endif |
4096 NOTE_LINE_NUMBER (note) = subtype; 4097 BLOCK_FOR_INSN (note) = NULL; 4098 add_insn_after (note, after); 4099 return note; 4100} 4101 4102/* Emit a copy of note ORIG after the insn AFTER. */ 4103 4104rtx 4105emit_note_copy_after (rtx orig, rtx after) 4106{ 4107 rtx note; 4108 4109 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers) 4110 { 4111 cur_insn_uid++; 4112 return 0; 4113 } 4114 4115 note = rtx_alloc (NOTE); 4116 INSN_UID (note) = cur_insn_uid++; 4117 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig); 4118 NOTE_DATA (note) = NOTE_DATA (orig); 4119 BLOCK_FOR_INSN (note) = NULL; 4120 add_insn_after (note, after); 4121 return note; 4122} 4123 4124/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */ 4125rtx 4126emit_insn_after_setloc (rtx pattern, rtx after, int loc) 4127{ 4128 rtx last = emit_insn_after_noloc (pattern, after); 4129 4130 if (pattern == NULL_RTX || !loc) 4131 return last; 4132 4133 after = NEXT_INSN (after); 4134 while (1) 4135 { 4136 if (active_insn_p (after) && !INSN_LOCATOR (after)) 4137 INSN_LOCATOR (after) = loc; 4138 if (after == last) 4139 break; 4140 after = NEXT_INSN (after); 4141 } 4142 return last; 4143} 4144 4145/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */ 4146rtx 4147emit_insn_after (rtx pattern, rtx after) 4148{ 4149 if (INSN_P (after)) 4150 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after)); 4151 else 4152 return emit_insn_after_noloc (pattern, after); 4153} 4154 4155/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. 
*/ 4156rtx 4157emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc) 4158{ 4159 rtx last = emit_jump_insn_after_noloc (pattern, after); 4160 4161 if (pattern == NULL_RTX || !loc) 4162 return last; 4163 4164 after = NEXT_INSN (after); 4165 while (1) 4166 { 4167 if (active_insn_p (after) && !INSN_LOCATOR (after)) 4168 INSN_LOCATOR (after) = loc; 4169 if (after == last) 4170 break; 4171 after = NEXT_INSN (after); 4172 } 4173 return last; 4174} 4175 4176/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */ 4177rtx 4178emit_jump_insn_after (rtx pattern, rtx after) 4179{ 4180 if (INSN_P (after)) 4181 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after)); 4182 else 4183 return emit_jump_insn_after_noloc (pattern, after); 4184} 4185 4186/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */ 4187rtx 4188emit_call_insn_after_setloc (rtx pattern, rtx after, int loc) 4189{ 4190 rtx last = emit_call_insn_after_noloc (pattern, after); 4191 4192 if (pattern == NULL_RTX || !loc) 4193 return last; 4194 4195 after = NEXT_INSN (after); 4196 while (1) 4197 { 4198 if (active_insn_p (after) && !INSN_LOCATOR (after)) 4199 INSN_LOCATOR (after) = loc; 4200 if (after == last) 4201 break; 4202 after = NEXT_INSN (after); 4203 } 4204 return last; 4205} 4206 4207/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */ 4208rtx 4209emit_call_insn_after (rtx pattern, rtx after) 4210{ 4211 if (INSN_P (after)) 4212 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after)); 4213 else 4214 return emit_call_insn_after_noloc (pattern, after); 4215} 4216 4217/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. 
*/ 4218rtx 4219emit_insn_before_setloc (rtx pattern, rtx before, int loc) 4220{ 4221 rtx first = PREV_INSN (before); 4222 rtx last = emit_insn_before_noloc (pattern, before); 4223 4224 if (pattern == NULL_RTX || !loc) 4225 return last; 4226 4227 first = NEXT_INSN (first); 4228 while (1) 4229 { 4230 if (active_insn_p (first) && !INSN_LOCATOR (first)) 4231 INSN_LOCATOR (first) = loc; 4232 if (first == last) 4233 break; 4234 first = NEXT_INSN (first); 4235 } 4236 return last; 4237} 4238 4239/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */ 4240rtx 4241emit_insn_before (rtx pattern, rtx before) 4242{ 4243 if (INSN_P (before)) 4244 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before)); 4245 else 4246 return emit_insn_before_noloc (pattern, before); 4247} 4248 4249/* like emit_insn_before_noloc, but set insn_locator according to scope. */ 4250rtx 4251emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc) 4252{ 4253 rtx first = PREV_INSN (before); 4254 rtx last = emit_jump_insn_before_noloc (pattern, before); 4255 4256 if (pattern == NULL_RTX) 4257 return last; 4258 4259 first = NEXT_INSN (first); 4260 while (1) 4261 { 4262 if (active_insn_p (first) && !INSN_LOCATOR (first)) 4263 INSN_LOCATOR (first) = loc; 4264 if (first == last) 4265 break; 4266 first = NEXT_INSN (first); 4267 } 4268 return last; 4269} 4270 4271/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */ 4272rtx 4273emit_jump_insn_before (rtx pattern, rtx before) 4274{ 4275 if (INSN_P (before)) 4276 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before)); 4277 else 4278 return emit_jump_insn_before_noloc (pattern, before); 4279} 4280 4281/* like emit_insn_before_noloc, but set insn_locator according to scope. 
*/ 4282rtx 4283emit_call_insn_before_setloc (rtx pattern, rtx before, int loc) 4284{ 4285 rtx first = PREV_INSN (before); 4286 rtx last = emit_call_insn_before_noloc (pattern, before); 4287 4288 if (pattern == NULL_RTX) 4289 return last; 4290 4291 first = NEXT_INSN (first); 4292 while (1) 4293 { 4294 if (active_insn_p (first) && !INSN_LOCATOR (first)) 4295 INSN_LOCATOR (first) = loc; 4296 if (first == last) 4297 break; 4298 first = NEXT_INSN (first); 4299 } 4300 return last; 4301} 4302 4303/* like emit_call_insn_before_noloc, 4304 but set insn_locator according to before. */ 4305rtx 4306emit_call_insn_before (rtx pattern, rtx before) 4307{ 4308 if (INSN_P (before)) 4309 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before)); 4310 else 4311 return emit_call_insn_before_noloc (pattern, before); 4312} 4313 4314/* Take X and emit it at the end of the doubly-linked 4315 INSN list. 4316 4317 Returns the last insn emitted. */ 4318 4319rtx 4320emit_insn (rtx x) 4321{ 4322 rtx last = last_insn; 4323 rtx insn; 4324 4325 if (x == NULL_RTX) 4326 return last; 4327 4328 switch (GET_CODE (x)) 4329 { 4330 case INSN: 4331 case JUMP_INSN: 4332 case CALL_INSN: 4333 case CODE_LABEL: 4334 case BARRIER: 4335 case NOTE: 4336 insn = x; 4337 while (insn) 4338 { 4339 rtx next = NEXT_INSN (insn); 4340 add_insn (insn); 4341 last = insn; 4342 insn = next; 4343 } 4344 break; 4345 4346#ifdef ENABLE_RTL_CHECKING 4347 case SEQUENCE:
|
4685 abort ();
|
4348 gcc_unreachable (); |
4349 break; 4350#endif 4351 4352 default: 4353 last = make_insn_raw (x); 4354 add_insn (last); 4355 break; 4356 } 4357 4358 return last; 4359} 4360 4361/* Make an insn of code JUMP_INSN with pattern X 4362 and add it to the end of the doubly-linked list. */ 4363 4364rtx 4365emit_jump_insn (rtx x) 4366{ 4367 rtx last = NULL_RTX, insn; 4368 4369 switch (GET_CODE (x)) 4370 { 4371 case INSN: 4372 case JUMP_INSN: 4373 case CALL_INSN: 4374 case CODE_LABEL: 4375 case BARRIER: 4376 case NOTE: 4377 insn = x; 4378 while (insn) 4379 { 4380 rtx next = NEXT_INSN (insn); 4381 add_insn (insn); 4382 last = insn; 4383 insn = next; 4384 } 4385 break; 4386 4387#ifdef ENABLE_RTL_CHECKING 4388 case SEQUENCE:
|
4726 abort ();
|
4389 gcc_unreachable (); |
4390 break; 4391#endif 4392 4393 default: 4394 last = make_jump_insn_raw (x); 4395 add_insn (last); 4396 break; 4397 } 4398 4399 return last; 4400} 4401 4402/* Make an insn of code CALL_INSN with pattern X 4403 and add it to the end of the doubly-linked list. */ 4404 4405rtx 4406emit_call_insn (rtx x) 4407{ 4408 rtx insn; 4409 4410 switch (GET_CODE (x)) 4411 { 4412 case INSN: 4413 case JUMP_INSN: 4414 case CALL_INSN: 4415 case CODE_LABEL: 4416 case BARRIER: 4417 case NOTE: 4418 insn = emit_insn (x); 4419 break; 4420 4421#ifdef ENABLE_RTL_CHECKING 4422 case SEQUENCE:
|
4760 abort ();
|
4423 gcc_unreachable (); |
4424 break; 4425#endif 4426 4427 default: 4428 insn = make_call_insn_raw (x); 4429 add_insn (insn); 4430 break; 4431 } 4432 4433 return insn; 4434} 4435 4436/* Add the label LABEL to the end of the doubly-linked list. */ 4437 4438rtx 4439emit_label (rtx label) 4440{ 4441 /* This can be called twice for the same label 4442 as a result of the confusion that follows a syntax error! 4443 So make it harmless. */ 4444 if (INSN_UID (label) == 0) 4445 { 4446 INSN_UID (label) = cur_insn_uid++; 4447 add_insn (label); 4448 } 4449 return label; 4450} 4451 4452/* Make an insn of code BARRIER 4453 and add it to the end of the doubly-linked list. */ 4454 4455rtx 4456emit_barrier (void) 4457{ 4458 rtx barrier = rtx_alloc (BARRIER); 4459 INSN_UID (barrier) = cur_insn_uid++; 4460 add_insn (barrier); 4461 return barrier; 4462} 4463 4464/* Make line numbering NOTE insn for LOCATION add it to the end 4465 of the doubly-linked list, but only if line-numbers are desired for 4466 debugging info and it doesn't match the previous one. */ 4467 4468rtx 4469emit_line_note (location_t location) 4470{ 4471 rtx note; 4472
|
4810 set_file_and_line_for_stmt (location);
4811
|
4473#ifdef USE_MAPPED_LOCATION 4474 if (location == last_location) 4475 return NULL_RTX; 4476#else |
4477 if (location.file && last_location.file 4478 && !strcmp (location.file, last_location.file) 4479 && location.line == last_location.line) 4480 return NULL_RTX;
|
4481#endif |
4482 last_location = location; 4483 4484 if (no_line_numbers) 4485 { 4486 cur_insn_uid++; 4487 return NULL_RTX; 4488 } 4489
|
4490#ifdef USE_MAPPED_LOCATION 4491 note = emit_note ((int) location); 4492#else |
4493 note = emit_note (location.line); 4494 NOTE_SOURCE_FILE (note) = location.file;
|
4495#endif |
4496 4497 return note; 4498} 4499 4500/* Emit a copy of note ORIG. */ 4501 4502rtx 4503emit_note_copy (rtx orig) 4504{ 4505 rtx note; 4506 4507 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers) 4508 { 4509 cur_insn_uid++; 4510 return NULL_RTX; 4511 } 4512 4513 note = rtx_alloc (NOTE); 4514 4515 INSN_UID (note) = cur_insn_uid++; 4516 NOTE_DATA (note) = NOTE_DATA (orig); 4517 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig); 4518 BLOCK_FOR_INSN (note) = NULL; 4519 add_insn (note); 4520 4521 return note; 4522} 4523 4524/* Make an insn of code NOTE or type NOTE_NO 4525 and add it to the end of the doubly-linked list. */ 4526 4527rtx 4528emit_note (int note_no) 4529{ 4530 rtx note; 4531 4532 note = rtx_alloc (NOTE); 4533 INSN_UID (note) = cur_insn_uid++; 4534 NOTE_LINE_NUMBER (note) = note_no; 4535 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); 4536 BLOCK_FOR_INSN (note) = NULL; 4537 add_insn (note); 4538 return note; 4539} 4540 4541/* Cause next statement to emit a line note even if the line number 4542 has not changed. */ 4543 4544void 4545force_next_line_note (void) 4546{
|
4547#ifdef USE_MAPPED_LOCATION 4548 last_location = -1; 4549#else |
4550 last_location.line = -1;
|
4551#endif |
4552} 4553 4554/* Place a note of KIND on insn INSN with DATUM as the datum. If a 4555 note of this type already exists, remove it first. */ 4556 4557rtx 4558set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) 4559{ 4560 rtx note = find_reg_note (insn, kind, NULL_RTX); 4561 4562 switch (kind) 4563 { 4564 case REG_EQUAL: 4565 case REG_EQUIV: 4566 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn 4567 has multiple sets (some callers assume single_set 4568 means the insn only has one set, when in fact it 4569 means the insn only has one * useful * set). */ 4570 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) 4571 {
|
4898 if (note)
4899 abort ();
|
4572 gcc_assert (!note); |
4573 return NULL_RTX; 4574 } 4575 4576 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes. 4577 It serves no useful purpose and breaks eliminate_regs. */ 4578 if (GET_CODE (datum) == ASM_OPERANDS) 4579 return NULL_RTX; 4580 break; 4581 4582 default: 4583 break; 4584 } 4585 4586 if (note) 4587 { 4588 XEXP (note, 0) = datum; 4589 return note; 4590 } 4591 4592 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn)); 4593 return REG_NOTES (insn); 4594} 4595 4596/* Return an indication of which type of insn should have X as a body. 4597 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */ 4598
|
4926enum rtx_code
|
4599static enum rtx_code |
4600classify_insn (rtx x) 4601{
|
4929 if (GET_CODE (x) == CODE_LABEL)
|
4602 if (LABEL_P (x)) |
4603 return CODE_LABEL; 4604 if (GET_CODE (x) == CALL) 4605 return CALL_INSN; 4606 if (GET_CODE (x) == RETURN) 4607 return JUMP_INSN; 4608 if (GET_CODE (x) == SET) 4609 { 4610 if (SET_DEST (x) == pc_rtx) 4611 return JUMP_INSN; 4612 else if (GET_CODE (SET_SRC (x)) == CALL) 4613 return CALL_INSN; 4614 else 4615 return INSN; 4616 } 4617 if (GET_CODE (x) == PARALLEL) 4618 { 4619 int j; 4620 for (j = XVECLEN (x, 0) - 1; j >= 0; j--) 4621 if (GET_CODE (XVECEXP (x, 0, j)) == CALL) 4622 return CALL_INSN; 4623 else if (GET_CODE (XVECEXP (x, 0, j)) == SET 4624 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx) 4625 return JUMP_INSN; 4626 else if (GET_CODE (XVECEXP (x, 0, j)) == SET 4627 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL) 4628 return CALL_INSN; 4629 } 4630 return INSN; 4631} 4632 4633/* Emit the rtl pattern X as an appropriate kind of insn. 4634 If X is a label, it is simply added into the insn chain. */ 4635 4636rtx 4637emit (rtx x) 4638{ 4639 enum rtx_code code = classify_insn (x); 4640
|
4968 if (code == CODE_LABEL)
4969 return emit_label (x);
4970 else if (code == INSN)
4971 return emit_insn (x);
4972 else if (code == JUMP_INSN)
|
4641 switch (code) |
4642 {
|
4974 rtx insn = emit_jump_insn (x);
4975 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4976 return emit_barrier ();
4977 return insn;
|
4643 case CODE_LABEL: 4644 return emit_label (x); 4645 case INSN: 4646 return emit_insn (x); 4647 case JUMP_INSN: 4648 { 4649 rtx insn = emit_jump_insn (x); 4650 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN) 4651 return emit_barrier (); 4652 return insn; 4653 } 4654 case CALL_INSN: 4655 return emit_call_insn (x); 4656 default: 4657 gcc_unreachable (); |
4658 }
|
4979 else if (code == CALL_INSN)
4980 return emit_call_insn (x);
4981 else
4982 abort ();
|
4659} 4660 4661/* Space for free sequence stack entries. */
|
4986static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
|
4662static GTY ((deletable)) struct sequence_stack *free_sequence_stack; |
4663
|
4988/* Begin emitting insns to a sequence which can be packaged in an
4989 RTL_EXPR. If this sequence will contain something that might cause
4990 the compiler to pop arguments to function calls (because those
4991 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4992 details), use do_pending_stack_adjust before calling this function.
4993 That will ensure that the deferred pops are not accidentally
4994 emitted in the middle of this sequence. */
|
4664/* Begin emitting insns to a sequence. If this sequence will contain 4665 something that might cause the compiler to pop arguments to function 4666 calls (because those pops have previously been deferred; see 4667 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust 4668 before calling this function. That will ensure that the deferred 4669 pops are not accidentally emitted in the middle of this sequence. */ |
4670 4671void 4672start_sequence (void) 4673{ 4674 struct sequence_stack *tem; 4675 4676 if (free_sequence_stack != NULL) 4677 { 4678 tem = free_sequence_stack; 4679 free_sequence_stack = tem->next; 4680 } 4681 else 4682 tem = ggc_alloc (sizeof (struct sequence_stack)); 4683 4684 tem->next = seq_stack; 4685 tem->first = first_insn; 4686 tem->last = last_insn;
|
5012 tem->sequence_rtl_expr = seq_rtl_expr;
|
4687 4688 seq_stack = tem; 4689 4690 first_insn = 0; 4691 last_insn = 0; 4692} 4693
|
5020/* Similarly, but indicate that this sequence will be placed in T, an
5021 RTL_EXPR. See the documentation for start_sequence for more
5022 information about how to use this function. */
5023
5024void
5025start_sequence_for_rtl_expr (tree t)
5026{
5027 start_sequence ();
5028
5029 seq_rtl_expr = t;
5030}
5031
|
4694/* Set up the insn chain starting with FIRST as the current sequence, 4695 saving the previously current one. See the documentation for 4696 start_sequence for more information about how to use this function. */ 4697 4698void 4699push_to_sequence (rtx first) 4700{ 4701 rtx last; 4702 4703 start_sequence (); 4704 4705 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last)); 4706 4707 first_insn = first; 4708 last_insn = last; 4709} 4710
|
5049/* Set up the insn chain from a chain stort in FIRST to LAST. */
5050
5051void
5052push_to_full_sequence (rtx first, rtx last)
5053{
5054 start_sequence ();
5055 first_insn = first;
5056 last_insn = last;
5057 /* We really should have the end of the insn chain here. */
5058 if (last && NEXT_INSN (last))
5059 abort ();
5060}
5061
|
4711/* Set up the outer-level insn chain 4712 as the current sequence, saving the previously current one. */ 4713 4714void 4715push_topmost_sequence (void) 4716{ 4717 struct sequence_stack *stack, *top = NULL; 4718 4719 start_sequence (); 4720 4721 for (stack = seq_stack; stack; stack = stack->next) 4722 top = stack; 4723 4724 first_insn = top->first; 4725 last_insn = top->last;
|
5077 seq_rtl_expr = top->sequence_rtl_expr;
|
4726} 4727 4728/* After emitting to the outer-level insn chain, update the outer-level 4729 insn chain, and restore the previous saved state. */ 4730 4731void 4732pop_topmost_sequence (void) 4733{ 4734 struct sequence_stack *stack, *top = NULL; 4735 4736 for (stack = seq_stack; stack; stack = stack->next) 4737 top = stack; 4738 4739 top->first = first_insn; 4740 top->last = last_insn;
|
5093 /* ??? Why don't we save seq_rtl_expr here? */
|
4741 4742 end_sequence (); 4743} 4744 4745/* After emitting to a sequence, restore previous saved state. 4746 4747 To get the contents of the sequence just made, you must call 4748 `get_insns' *before* calling here. 4749 4750 If the compiler might have deferred popping arguments while 4751 generating this sequence, and this sequence will not be immediately 4752 inserted into the instruction stream, use do_pending_stack_adjust 4753 before calling get_insns. That will ensure that the deferred 4754 pops are inserted into this sequence, and not into some random 4755 location in the instruction stream. See INHIBIT_DEFER_POP for more 4756 information about deferred popping of arguments. */ 4757 4758void 4759end_sequence (void) 4760{ 4761 struct sequence_stack *tem = seq_stack; 4762 4763 first_insn = tem->first; 4764 last_insn = tem->last;
|
5118 seq_rtl_expr = tem->sequence_rtl_expr;
|
4765 seq_stack = tem->next; 4766 4767 memset (tem, 0, sizeof (*tem)); 4768 tem->next = free_sequence_stack; 4769 free_sequence_stack = tem; 4770} 4771
|
5126/* This works like end_sequence, but records the old sequence in FIRST
5127 and LAST. */
5128
5129void
5130end_full_sequence (rtx *first, rtx *last)
5131{
5132 *first = first_insn;
5133 *last = last_insn;
5134 end_sequence ();
5135}
5136
|
4772/* Return 1 if currently emitting into a sequence. */ 4773 4774int 4775in_sequence_p (void) 4776{ 4777 return seq_stack != 0; 4778} 4779 4780/* Put the various virtual registers into REGNO_REG_RTX. */ 4781
|
5147void
|
4782static void |
4783init_virtual_regs (struct emit_status *es) 4784{ 4785 rtx *ptr = es->x_regno_reg_rtx; 4786 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx; 4787 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx; 4788 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx; 4789 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx; 4790 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx; 4791} 4792 4793 4794/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */ 4795static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS]; 4796static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS]; 4797static int copy_insn_n_scratches; 4798 4799/* When an insn is being copied by copy_insn_1, this is nonzero if we have 4800 copied an ASM_OPERANDS. 4801 In that case, it is the original input-operand vector. */ 4802static rtvec orig_asm_operands_vector; 4803 4804/* When an insn is being copied by copy_insn_1, this is nonzero if we have 4805 copied an ASM_OPERANDS. 4806 In that case, it is the copied input-operand vector. */ 4807static rtvec copy_asm_operands_vector; 4808 4809/* Likewise for the constraints vector. */ 4810static rtvec orig_asm_constraints_vector; 4811static rtvec copy_asm_constraints_vector; 4812 4813/* Recursively create a new copy of an rtx for copy_insn. 4814 This function differs from copy_rtx in that it handles SCRATCHes and 4815 ASM_OPERANDs properly. 4816 Normally, this function is not used directly; use copy_insn as front end. 4817 However, you could first copy an insn pattern with copy_insn and then use 4818 this function afterwards to properly copy any REG_NOTEs containing 4819 SCRATCHes. */ 4820 4821rtx 4822copy_insn_1 (rtx orig) 4823{ 4824 rtx copy; 4825 int i, j; 4826 RTX_CODE code; 4827 const char *format_ptr; 4828 4829 code = GET_CODE (orig); 4830 4831 switch (code) 4832 { 4833 case REG:
|
5199 case QUEUED:
|
4834 case CONST_INT: 4835 case CONST_DOUBLE: 4836 case CONST_VECTOR: 4837 case SYMBOL_REF: 4838 case CODE_LABEL: 4839 case PC: 4840 case CC0:
|
5207 case ADDRESSOF:
|
4841 return orig;
|
4842 case CLOBBER: 4843 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER) 4844 return orig; 4845 break; |
4846 4847 case SCRATCH: 4848 for (i = 0; i < copy_insn_n_scratches; i++) 4849 if (copy_insn_scratch_in[i] == orig) 4850 return copy_insn_scratch_out[i]; 4851 break; 4852 4853 case CONST: 4854 /* CONST can be shared if it contains a SYMBOL_REF. If it contains 4855 a LABEL_REF, it isn't sharable. */ 4856 if (GET_CODE (XEXP (orig, 0)) == PLUS 4857 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF 4858 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT) 4859 return orig; 4860 break; 4861 4862 /* A MEM with a constant address is not sharable. The problem is that 4863 the constant address may need to be reloaded. If the mem is shared, 4864 then reloading one copy of this mem will cause all copies to appear 4865 to have been reloaded. */ 4866 4867 default: 4868 break; 4869 } 4870
|
5234 copy = rtx_alloc (code);
5235
5236 /* Copy the various flags, and other information. We assume that
5237 all fields need copying, and then clear the fields that should
|
4871 /* Copy the various flags, fields, and other information. We assume 4872 that all fields need copying, and then clear the fields that should |
4873 not be copied. That is the sensible default behavior, and forces 4874 us to explicitly document why we are *not* copying a flag. */
|
5240 memcpy (copy, orig, RTX_HDR_SIZE);
|
4875 copy = shallow_copy_rtx (orig); |
4876 4877 /* We do not copy the USED flag, which is used as a mark bit during 4878 walks over the RTL. */ 4879 RTX_FLAG (copy, used) = 0; 4880 4881 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
|
5247 if (GET_RTX_CLASS (code) == 'i')
|
4882 if (INSN_P (orig)) |
4883 { 4884 RTX_FLAG (copy, jump) = 0; 4885 RTX_FLAG (copy, call) = 0; 4886 RTX_FLAG (copy, frame_related) = 0; 4887 } 4888 4889 format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); 4890 4891 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
|
5257 {
5258 copy->u.fld[i] = orig->u.fld[i];
5259 switch (*format_ptr++)
5260 {
5261 case 'e':
5262 if (XEXP (orig, i) != NULL)
5263 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5264 break;
|
4892 switch (*format_ptr++) 4893 { 4894 case 'e': 4895 if (XEXP (orig, i) != NULL) 4896 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i)); 4897 break; |
4898
|
5266 case 'E':
5267 case 'V':
5268 if (XVEC (orig, i) == orig_asm_constraints_vector)
5269 XVEC (copy, i) = copy_asm_constraints_vector;
5270 else if (XVEC (orig, i) == orig_asm_operands_vector)
5271 XVEC (copy, i) = copy_asm_operands_vector;
5272 else if (XVEC (orig, i) != NULL)
5273 {
5274 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5275 for (j = 0; j < XVECLEN (copy, i); j++)
5276 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5277 }
5278 break;
|
4899 case 'E': 4900 case 'V': 4901 if (XVEC (orig, i) == orig_asm_constraints_vector) 4902 XVEC (copy, i) = copy_asm_constraints_vector; 4903 else if (XVEC (orig, i) == orig_asm_operands_vector) 4904 XVEC (copy, i) = copy_asm_operands_vector; 4905 else if (XVEC (orig, i) != NULL) 4906 { 4907 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); 4908 for (j = 0; j < XVECLEN (copy, i); j++) 4909 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j)); 4910 } 4911 break; |
4912
|
5280 case 't':
5281 case 'w':
5282 case 'i':
5283 case 's':
5284 case 'S':
5285 case 'u':
5286 case '0':
5287 /* These are left unchanged. */
5288 break;
|
4913 case 't': 4914 case 'w': 4915 case 'i': 4916 case 's': 4917 case 'S': 4918 case 'u': 4919 case '0': 4920 /* These are left unchanged. */ 4921 break; |
4922
|
5290 default:
5291 abort ();
5292 }
5293 }
|
4923 default: 4924 gcc_unreachable (); 4925 } |
4926 4927 if (code == SCRATCH) 4928 { 4929 i = copy_insn_n_scratches++;
|
5298 if (i >= MAX_RECOG_OPERANDS)
5299 abort ();
|
4930 gcc_assert (i < MAX_RECOG_OPERANDS); |
4931 copy_insn_scratch_in[i] = orig; 4932 copy_insn_scratch_out[i] = copy; 4933 } 4934 else if (code == ASM_OPERANDS) 4935 { 4936 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig); 4937 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy); 4938 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig); 4939 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy); 4940 } 4941 4942 return copy; 4943} 4944 4945/* Create a new copy of an rtx. 4946 This function differs from copy_rtx in that it handles SCRATCHes and 4947 ASM_OPERANDs properly. 4948 INSN doesn't really have to be a full INSN; it could be just the 4949 pattern. */ 4950rtx 4951copy_insn (rtx insn) 4952{ 4953 copy_insn_n_scratches = 0; 4954 orig_asm_operands_vector = 0; 4955 orig_asm_constraints_vector = 0; 4956 copy_asm_operands_vector = 0; 4957 copy_asm_constraints_vector = 0; 4958 return copy_insn_1 (insn); 4959} 4960 4961/* Initialize data structures and variables in this file 4962 before generating rtl for each function. */ 4963 4964void 4965init_emit (void) 4966{ 4967 struct function *f = cfun; 4968 4969 f->emit = ggc_alloc (sizeof (struct emit_status)); 4970 first_insn = NULL; 4971 last_insn = NULL;
|
5341 seq_rtl_expr = NULL;
|
4972 cur_insn_uid = 1; 4973 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
|
5344 last_location.line = 0;
5345 last_location.file = 0;
|
4974 last_location = UNKNOWN_LOCATION; |
4975 first_label_num = label_num;
|
5347 last_label_num = 0;
|
4976 seq_stack = NULL; 4977 4978 /* Init the tables that describe all the pseudo regs. */ 4979 4980 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101; 4981 4982 f->emit->regno_pointer_align 4983 = ggc_alloc_cleared (f->emit->regno_pointer_align_length 4984 * sizeof (unsigned char)); 4985 4986 regno_reg_rtx 4987 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx)); 4988 4989 /* Put copies of all the hard registers into regno_reg_rtx. */ 4990 memcpy (regno_reg_rtx, 4991 static_regno_reg_rtx, 4992 FIRST_PSEUDO_REGISTER * sizeof (rtx)); 4993 4994 /* Put copies of all the virtual register rtx into regno_reg_rtx. */ 4995 init_virtual_regs (f->emit); 4996 4997 /* Indicate that the virtual registers and stack locations are 4998 all pointers. */ 4999 REG_POINTER (stack_pointer_rtx) = 1; 5000 REG_POINTER (frame_pointer_rtx) = 1; 5001 REG_POINTER (hard_frame_pointer_rtx) = 1; 5002 REG_POINTER (arg_pointer_rtx) = 1; 5003 5004 REG_POINTER (virtual_incoming_args_rtx) = 1; 5005 REG_POINTER (virtual_stack_vars_rtx) = 1; 5006 REG_POINTER (virtual_stack_dynamic_rtx) = 1; 5007 REG_POINTER (virtual_outgoing_args_rtx) = 1; 5008 REG_POINTER (virtual_cfa_rtx) = 1; 5009 5010#ifdef STACK_BOUNDARY 5011 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY; 5012 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY; 5013 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY; 5014 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY; 5015 5016 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY; 5017 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY; 5018 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY; 5019 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY; 5020 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD; 5021#endif 5022 5023#ifdef INIT_EXPANDERS 5024 INIT_EXPANDERS; 5025#endif 5026} 5027
|
5400/* Generate the constant 0. */
|
5028/* Generate a vector constant for mode MODE and constant value CONSTANT. */ |
5029 5030static rtx
|
5403gen_const_vector_0 (enum machine_mode mode)
|
5031gen_const_vector (enum machine_mode mode, int constant) |
5032{ 5033 rtx tem; 5034 rtvec v; 5035 int units, i; 5036 enum machine_mode inner; 5037 5038 units = GET_MODE_NUNITS (mode); 5039 inner = GET_MODE_INNER (mode); 5040
|
5041 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); 5042 |
5043 v = rtvec_alloc (units); 5044
|
5415 /* We need to call this function after we to set CONST0_RTX first. */
5416 if (!CONST0_RTX (inner))
5417 abort ();
|
5045 /* We need to call this function after we set the scalar const_tiny_rtx 5046 entries. */ 5047 gcc_assert (const_tiny_rtx[constant][(int) inner]); |
5048 5049 for (i = 0; i < units; ++i)
|
5420 RTVEC_ELT (v, i) = CONST0_RTX (inner);
|
5050 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner]; |
5051 5052 tem = gen_rtx_raw_CONST_VECTOR (mode, v); 5053 return tem; 5054} 5055 5056/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
|
5427 all elements are zero. */
|
5057 all elements are zero, and the one vector when all elements are one. */ |
5058rtx 5059gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v) 5060{
|
5431 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
|
5061 enum machine_mode inner = GET_MODE_INNER (mode); 5062 int nunits = GET_MODE_NUNITS (mode); 5063 rtx x; |
5064 int i; 5065
|
5434 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5435 if (RTVEC_ELT (v, i) != inner_zero)
5436 return gen_rtx_raw_CONST_VECTOR (mode, v);
5437 return CONST0_RTX (mode);
|
5066 /* Check to see if all of the elements have the same value. */ 5067 x = RTVEC_ELT (v, nunits - 1); 5068 for (i = nunits - 2; i >= 0; i--) 5069 if (RTVEC_ELT (v, i) != x) 5070 break; 5071 5072 /* If the values are all the same, check to see if we can use one of the 5073 standard constant vectors. */ 5074 if (i == -1) 5075 { 5076 if (x == CONST0_RTX (inner)) 5077 return CONST0_RTX (mode); 5078 else if (x == CONST1_RTX (inner)) 5079 return CONST1_RTX (mode); 5080 } 5081 5082 return gen_rtx_raw_CONST_VECTOR (mode, v); |
5083} 5084 5085/* Create some permanent unique rtl objects shared between all functions. 5086 LINE_NUMBERS is nonzero if line numbers are to be generated. */ 5087 5088void 5089init_emit_once (int line_numbers) 5090{ 5091 int i; 5092 enum machine_mode mode; 5093 enum machine_mode double_mode; 5094 5095 /* We need reg_raw_mode, so initialize the modes now. */ 5096 init_reg_modes_once (); 5097 5098 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash 5099 tables. */ 5100 const_int_htab = htab_create_ggc (37, const_int_htab_hash, 5101 const_int_htab_eq, NULL); 5102 5103 const_double_htab = htab_create_ggc (37, const_double_htab_hash, 5104 const_double_htab_eq, NULL); 5105 5106 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash, 5107 mem_attrs_htab_eq, NULL); 5108 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash, 5109 reg_attrs_htab_eq, NULL); 5110 5111 no_line_numbers = ! line_numbers; 5112 5113 /* Compute the word and byte modes. */ 5114 5115 byte_mode = VOIDmode; 5116 word_mode = VOIDmode; 5117 double_mode = VOIDmode; 5118
|
5474 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
|
5119 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); 5120 mode != VOIDmode; |
5121 mode = GET_MODE_WIDER_MODE (mode)) 5122 { 5123 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT 5124 && byte_mode == VOIDmode) 5125 byte_mode = mode; 5126 5127 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD 5128 && word_mode == VOIDmode) 5129 word_mode = mode; 5130 } 5131
|
5486 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
|
5132 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); 5133 mode != VOIDmode; |
5134 mode = GET_MODE_WIDER_MODE (mode)) 5135 { 5136 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE 5137 && double_mode == VOIDmode) 5138 double_mode = mode; 5139 } 5140 5141 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0); 5142 5143 /* Assign register numbers to the globally defined register rtx. 5144 This must be done at runtime because the register number field 5145 is in a union and some compilers can't initialize unions. */ 5146
|
5500 pc_rtx = gen_rtx (PC, VOIDmode);
5501 cc0_rtx = gen_rtx (CC0, VOIDmode);
|
5147 pc_rtx = gen_rtx_PC (VOIDmode); 5148 cc0_rtx = gen_rtx_CC0 (VOIDmode); |
5149 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM); 5150 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM); 5151 if (hard_frame_pointer_rtx == 0) 5152 hard_frame_pointer_rtx = gen_raw_REG (Pmode, 5153 HARD_FRAME_POINTER_REGNUM); 5154 if (arg_pointer_rtx == 0) 5155 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM); 5156 virtual_incoming_args_rtx = 5157 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM); 5158 virtual_stack_vars_rtx = 5159 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM); 5160 virtual_stack_dynamic_rtx = 5161 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); 5162 virtual_outgoing_args_rtx = 5163 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); 5164 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); 5165 5166 /* Initialize RTL for commonly used hard registers. These are 5167 copied into regno_reg_rtx as we begin to compile each function. */ 5168 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 5169 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); 5170 5171#ifdef INIT_EXPANDERS 5172 /* This is to initialize {init|mark|free}_machine_status before the first 5173 call to push_function_context_to. This is needed by the Chill front 5174 end which calls push_function_context_to before the first call to 5175 init_function_start. */ 5176 INIT_EXPANDERS; 5177#endif 5178 5179 /* Create the unique rtx's for certain rtx codes and operand values. */ 5180
|
5534 /* Don't use gen_rtx here since gen_rtx in this case
|
5181 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case |
5182 tries to use these variables. */ 5183 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++) 5184 const_int_rtx[i + MAX_SAVED_CONST_INT] = 5185 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i); 5186 5187 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT 5188 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT) 5189 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT]; 5190 else 5191 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE); 5192 5193 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode); 5194 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode); 5195 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode); 5196 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode); 5197 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode); 5198 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode); 5199 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode); 5200 5201 dconsthalf = dconst1;
|
5555 dconsthalf.exp--;
|
5202 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1); |
5203 5204 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3); 5205 5206 /* Initialize mathematical constants for constant folding builtins. 5207 These constants need to be given to at least 160 bits precision. */ 5208 real_from_string (&dconstpi, 5209 "3.1415926535897932384626433832795028841971693993751058209749445923078"); 5210 real_from_string (&dconste, 5211 "2.7182818284590452353602874713526624977572470936999595749669676277241"); 5212 5213 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++) 5214 { 5215 REAL_VALUE_TYPE *r = 5216 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2); 5217
|
5571 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
|
5218 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); 5219 mode != VOIDmode; |
5220 mode = GET_MODE_WIDER_MODE (mode)) 5221 const_tiny_rtx[i][(int) mode] = 5222 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode); 5223
|
5224 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT); 5225 mode != VOIDmode; 5226 mode = GET_MODE_WIDER_MODE (mode)) 5227 const_tiny_rtx[i][(int) mode] = 5228 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode); 5229 |
5230 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i); 5231
|
5578 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
|
5232 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); 5233 mode != VOIDmode; |
5234 mode = GET_MODE_WIDER_MODE (mode)) 5235 const_tiny_rtx[i][(int) mode] = GEN_INT (i); 5236 5237 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT); 5238 mode != VOIDmode; 5239 mode = GET_MODE_WIDER_MODE (mode)) 5240 const_tiny_rtx[i][(int) mode] = GEN_INT (i); 5241 } 5242 5243 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT); 5244 mode != VOIDmode; 5245 mode = GET_MODE_WIDER_MODE (mode))
|
5591 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
|
5246 { 5247 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); 5248 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); 5249 } |
5250 5251 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT); 5252 mode != VOIDmode; 5253 mode = GET_MODE_WIDER_MODE (mode))
|
5596 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
|
5254 { 5255 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); 5256 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); 5257 } |
5258 5259 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) 5260 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC) 5261 const_tiny_rtx[0][i] = const0_rtx; 5262 5263 const_tiny_rtx[0][(int) BImode] = const0_rtx; 5264 if (STORE_FLAG_VALUE == 1) 5265 const_tiny_rtx[1][(int) BImode] = const1_rtx; 5266 5267#ifdef RETURN_ADDRESS_POINTER_REGNUM 5268 return_address_pointer_rtx 5269 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); 5270#endif 5271 5272#ifdef STATIC_CHAIN_REGNUM 5273 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM); 5274 5275#ifdef STATIC_CHAIN_INCOMING_REGNUM 5276 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM) 5277 static_chain_incoming_rtx 5278 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM); 5279 else 5280#endif 5281 static_chain_incoming_rtx = static_chain_rtx; 5282#endif 5283 5284#ifdef STATIC_CHAIN 5285 static_chain_rtx = STATIC_CHAIN; 5286 5287#ifdef STATIC_CHAIN_INCOMING 5288 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING; 5289#else 5290 static_chain_incoming_rtx = static_chain_rtx; 5291#endif 5292#endif 5293 5294 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) 5295 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); 5296} 5297
|
5637/* Query and clear/ restore no_line_numbers. This is used by the
5638 switch / case handling in stmt.c to give proper line numbers in
5639 warnings about unreachable code. */
5640
5641int
5642force_line_numbers (void)
5643{
5644 int old = no_line_numbers;
5645
5646 no_line_numbers = 0;
5647 if (old)
5648 force_next_line_note ();
5649 return old;
5650}
5651
5652void
5653restore_line_number_status (int old_value)
5654{
5655 no_line_numbers = old_value;
5656}
5657
|
5298/* Produce exact duplicate of insn INSN after AFTER. 5299 Care updating of libcall regions if present. */ 5300 5301rtx 5302emit_copy_of_insn_after (rtx insn, rtx after) 5303{ 5304 rtx new; 5305 rtx note1, note2, link; 5306 5307 switch (GET_CODE (insn)) 5308 { 5309 case INSN: 5310 new = emit_insn_after (copy_insn (PATTERN (insn)), after); 5311 break; 5312 5313 case JUMP_INSN: 5314 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after); 5315 break; 5316 5317 case CALL_INSN: 5318 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after); 5319 if (CALL_INSN_FUNCTION_USAGE (insn)) 5320 CALL_INSN_FUNCTION_USAGE (new) 5321 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn)); 5322 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn); 5323 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn); 5324 break; 5325 5326 default:
|
5687 abort ();
|
5327 gcc_unreachable (); |
5328 } 5329 5330 /* Update LABEL_NUSES. */ 5331 mark_jump_label (PATTERN (new), new, 0); 5332 5333 INSN_LOCATOR (new) = INSN_LOCATOR (insn); 5334
|
5335 /* If the old insn is frame related, then so is the new one. This is 5336 primarily needed for IA-64 unwind info which marks epilogue insns, 5337 which may be duplicated by the basic block reordering code. */ 5338 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn); 5339 |
5340 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will 5341 make them. */ 5342 for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) 5343 if (REG_NOTE_KIND (link) != REG_LABEL) 5344 { 5345 if (GET_CODE (link) == EXPR_LIST) 5346 REG_NOTES (new) 5347 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link), 5348 XEXP (link, 0), 5349 REG_NOTES (new))); 5350 else 5351 REG_NOTES (new) 5352 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link), 5353 XEXP (link, 0), 5354 REG_NOTES (new))); 5355 } 5356 5357 /* Fix the libcall sequences. */ 5358 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL) 5359 { 5360 rtx p = new; 5361 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL) 5362 p = PREV_INSN (p); 5363 XEXP (note1, 0) = p; 5364 XEXP (note2, 0) = new; 5365 } 5366 INSN_CODE (new) = INSN_CODE (insn); 5367 return new; 5368} 5369
|
5370static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]; 5371rtx 5372gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno) 5373{ 5374 if (hard_reg_clobbers[mode][regno]) 5375 return hard_reg_clobbers[mode][regno]; 5376 else 5377 return (hard_reg_clobbers[mode][regno] = 5378 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno))); 5379} 5380 |
5381#include "gt-emit-rtl.h"
|