/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (as on most), these are
   of course identical.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
				 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx)x, b = (rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
	       unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG whose value comes from DECL at byte offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

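/* For illustration (assuming QImode is 8 bits wide, as it is when
   BITS_PER_UNIT is 8): trunc_int_for_mode sign-extends from the width
   of MODE, so

     rtx x = gen_int_mode (0xff, QImode);

   yields the same shared rtx as GEN_INT (-1), i.e. constm1_rtx.  */
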
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 agree), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

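/* For instance, on a host with 64-bit HOST_WIDE_INT (illustrative values;
   TImode assumed to be 2 * HOST_BITS_PER_WIDE_INT wide), the three cases
   above play out as:

     immed_double_const (5, 0, SImode);     case 1: gen_int_mode, CONST_INT
     immed_double_const (-1, -1, TImode);   case 2: all sign bits, CONST_INT
     immed_double_const (0, 1, TImode);     case 3: a real CONST_DOUBLE  */
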
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

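/* For example, outside of reload a Pmode request for the stack pointer
   hands back the shared global object:

     gcc_assert (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM)
		 == stack_pointer_rtx);

   whereas a request in some other mode allocates a fresh REG.  */
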
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!current_function_calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves?  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

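/* A few illustrative calls (hypothetical 32-bit target with 64-bit DImode
   and DFmode, REG a pseudo of the inner mode), following the rules above:

     validate_subreg (SImode, DImode, reg, 0);   true: lowpart word
     validate_subreg (SImode, DImode, reg, 4);   true: highpart word
     validate_subreg (SImode, DImode, reg, 2);   false: misaligned offset
     validate_subreg (DImode, DFmode, reg, 0);   true: float, size unchanged
     validate_subreg (SImode, DFmode, reg, 0);   true, but only via the
						 temporary word_mode
						 exception above  */
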
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* gen_rtvec (n, [rt1, ..., rtn])

   This routine creates an rtvec and stores within it the
   pointers to rtx's which are its arguments.  */

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

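/* A typical use is collecting the operands of a PARALLEL, e.g.
   (illustrative; SET1 and SET2 stand for previously built SET rtxs):

     rtvec v = gen_rtvec (2, set1, set2);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);  */
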
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  gcc_assert (!no_new_pseudos);

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
			  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

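/* For example, with generating_concat_p set, asking for a complex-mode
   pseudo yields a CONCAT of two scalar pseudos rather than one wide
   register (illustrative):

     rtx c = gen_reg_rtx (DCmode);
     gcc_assert (GET_CODE (c) == CONCAT);  */
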
/* Generate a register with the same attributes as REG, but offset by
   OFFSET bytes.  Do the big-endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between
     or some part of variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we did not do the big-endian correction, the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for a BYTES_BIG_ENDIAN, WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
                                                        of char[4]
     DI        HI    4          6           2           1st element in HImode
                                                        of int16[2]

     If the size of DECL is equal to or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
	  && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
	offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
	offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
	offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		      % UNITS_PER_WORD);
      else
	offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
	{
	  /* MODE is wider than the variable, so the new reg will cover
	     the whole variable, and the resulting OFFSET should be 0.  */
	  offset = 0;
	}
      else
	{
	  /* Convert little endian to machine endian WRT size of variable.  */
	  if (WORDS_BIG_ENDIAN)
	    offset = ((var_size - 1 - offset_le)
		      / UNITS_PER_WORD) * UNITS_PER_WORD;
	  else
	    offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

	  if (BYTES_BIG_ENDIAN)
	    offset += ((var_size - 1 - offset_le)
		       % UNITS_PER_WORD);
	  else
	    offset += offset_le % UNITS_PER_WORD;
	}
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from the memory attributes of MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to parameter declaration T.  */
void
set_decl_incoming_rtl (tree t, rtx x)
{
  DECL_INCOMING_RTL (t) = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
1055
1056/* Return 1 plus largest pseudo reg number used in the current function.  */
1057
1058int
1059max_reg_num (void)
1060{
1061  return reg_rtx_no;
1062}
1063
1064/* Return 1 + the largest label number used so far in the current function.  */
1065
1066int
1067max_label_num (void)
1068{
1069  return label_num;
1070}
1071
1072/* Return first label number used in this function (if any were used).  */
1073
1074int
1075get_first_label_num (void)
1076{
1077  return first_label_num;
1078}
1079
1080/* If the rtx for label was created during the expansion of a nested
1081   function, then first_label_num won't include this label number.
1082   Fix this now so that array indicies work later.  */
1083
1084void
1085maybe_set_first_label_num (rtx x)
1086{
1087  if (CODE_LABEL_NUMBER (x) < first_label_num)
1088    first_label_num = CODE_LABEL_NUMBER (x);
1089}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}

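/* Illustrative behavior (hypothetical 32-bit target): asking for the
   SImode lowpart of (sign_extend:DI (reg:SI N)) simply returns
   (reg:SI N), via the extension case above; asking for the DImode
   lowpart of an SImode register needs more words than X has, so the
   function returns 0.  */
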
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

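/* Worked example (assuming UNITS_PER_WORD == 4): for OUTERMODE == SImode
   and INNERMODE == DImode the difference is 4 bytes, so the lowpart
   offset is 0 on a little-endian target and 4 when WORDS_BIG_ENDIAN,
   because there the low-order word sits at the higher address.
   subreg_highpart_offset below is the mirror image.  */
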
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

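/* For example, with word_mode == SImode and UNITS_PER_WORD == 4,

     operand_subword (op, 1, 0, DImode)

   on a DImode MEM returns an SImode MEM at the original address plus 4
   (left unvalidated, since VALIDATE_ADDRESS is zero); for non-MEM
   operands the request is handed off to simplify_gen_subreg.  */
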
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR)
	inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
		   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if the two MEM_EXPRs can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (tree expr1, tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
			   TREE_OPERAND (expr2, 1));

  if (INDIRECT_REF_P (expr1))
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			     TREE_OPERAND (expr2, 0));

  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
     have been resolved here.  */
  gcc_assert (DECL_P (expr1));

  /* Decls with different pointers can't be equal.  */
  return 0;
}

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In that case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
	if (integer_zerop (TREE_OPERAND (t, 1)))
	  /* We don't know anything about the alignment.  */
	  align = BITS_PER_UNIT;
	else
	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR
	     || TREE_CODE (base) == BIT_FIELD_REF)
	base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
	{
	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
	  else
	    MEM_NOTRAP_P (ref) = 1;
	}
      else
	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	{
	  tree base_type = TREE_TYPE (base);
	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
		      || DECL_ARTIFICIAL (base));
	  MEM_READONLY_P (ref) = 1;
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t2);
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	  else if (flag_argument_noalias > 1
		   && (INDIRECT_REF_P (t2))
		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
	    {
	      expr = t2;
	      offset = NULL;
	    }
	}

      /* If this is a Fortran indirect argument reference, record the
	 parameter decl.  */
      else if (flag_argument_noalias > 1
	       && (INDIRECT_REF_P (t))
	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
	{
	  expr = t;
	  offset = NULL;
	}
    }
1654
1655  /* If we modified OFFSET based on T, then subtract the outstanding
1656     bit position offset.  Similarly, increase the size of the accessed
1657     object to contain the negative offset.  */
1658  if (apply_bitpos)
1659    {
1660      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1661      if (size)
1662	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1663    }
1664
1665  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1666    {
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1668	 we're overlapping.  */
1669      offset = NULL;
1670      expr = NULL;
1671    }
1672
1673  /* Now set the attributes we computed above.  */
1674  MEM_ATTRS (ref)
1675    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1676
1677  /* If this is already known to be a scalar or aggregate, we are done.  */
1678  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1679    return;
1680
1681  /* If it is a reference into an aggregate, this is part of an aggregate.
1682     Otherwise we don't know.  */
1683  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1684	   || TREE_CODE (t) == ARRAY_RANGE_REF
1685	   || TREE_CODE (t) == BIT_FIELD_REF)
1686    MEM_IN_STRUCT_P (ref) = 1;
1687}
1688
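/* Set the memory attributes of REF from tree T, as
   set_mem_attributes_minus_bitpos does with a zero bit position.
   For example, after expanding a tree reference EXP to a MEM (an
   illustrative sketch -- EXP and ADDR stand for values the caller
   already has):

     rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
     set_mem_attributes (mem, exp, 0);

   records the type-derived alias set, alignment and size on MEM.  */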
1689void
1690set_mem_attributes (rtx ref, tree t, int objectp)
1691{
1692  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1693}
1694
/* Set the expr and offset for MEM from REG's REG_EXPR and REG_OFFSET.  */
1696
1697void
1698set_mem_attrs_from_reg (rtx mem, rtx reg)
1699{
1700  MEM_ATTRS (mem)
1701    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1702		     GEN_INT (REG_OFFSET (reg)),
1703		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1704}
1705
1706/* Set the alias set of MEM to SET.  */
1707
1708void
1709set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1710{
1711#ifdef ENABLE_CHECKING
1712  /* If the new and old alias sets don't conflict, something is wrong.  */
1713  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1714#endif
1715
1716  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1717				   MEM_SIZE (mem), MEM_ALIGN (mem),
1718				   GET_MODE (mem));
1719}
1720
1721/* Set the alignment of MEM to ALIGN bits.  */
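
/* For example, a caller that has proved MEM is 8-byte aligned can
   record that fact with (an illustrative sketch; remember that ALIGN
   is measured in bits):

     set_mem_align (mem, 64);  */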
1722
1723void
1724set_mem_align (rtx mem, unsigned int align)
1725{
1726  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1727				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
1728				   GET_MODE (mem));
1729}
1730
1731/* Set the expr for MEM to EXPR.  */
1732
1733void
1734set_mem_expr (rtx mem, tree expr)
1735{
1736  MEM_ATTRS (mem)
1737    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1738		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1739}
1740
1741/* Set the offset of MEM to OFFSET.  */
1742
1743void
1744set_mem_offset (rtx mem, rtx offset)
1745{
1746  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1747				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1748				   GET_MODE (mem));
1749}
1750
1751/* Set the size of MEM to SIZE.  */
1752
1753void
1754set_mem_size (rtx mem, rtx size)
1755{
1756  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1757				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1758				   GET_MODE (mem));
1759}
1760
1761/* Return a memory reference like MEMREF, but with its mode changed to MODE
1762   and its address changed to ADDR.  (VOIDmode means don't change the mode.
1763   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1764   returned memory location is required to be valid.  The memory
1765   attributes are not changed.  */
1766
1767static rtx
1768change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1769{
1770  rtx new;
1771
1772  gcc_assert (MEM_P (memref));
1773  if (mode == VOIDmode)
1774    mode = GET_MODE (memref);
1775  if (addr == 0)
1776    addr = XEXP (memref, 0);
1777  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1778      && (!validate || memory_address_p (mode, addr)))
1779    return memref;
1780
1781  if (validate)
1782    {
1783      if (reload_in_progress || reload_completed)
1784	gcc_assert (memory_address_p (mode, addr));
1785      else
1786	addr = memory_address (mode, addr);
1787    }
1788
1789  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1790    return memref;
1791
1792  new = gen_rtx_MEM (mode, addr);
1793  MEM_COPY_ATTRIBUTES (new, memref);
1794  return new;
1795}
1796
1797/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1798   way we are changing MEMREF, so we only preserve the alias set.  */
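
/* For example, to reinterpret MEM in SImode at the same address (an
   illustrative sketch; a null ADDR keeps the old address):

     rtx word = change_address (mem, SImode, NULL_RTX);

   The size and alignment are then recomputed from SImode, and only
   the alias set is carried over.  */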
1799
1800rtx
1801change_address (rtx memref, enum machine_mode mode, rtx addr)
1802{
1803  rtx new = change_address_1 (memref, mode, addr, 1), size;
1804  enum machine_mode mmode = GET_MODE (new);
1805  unsigned int align;
1806
1807  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1808  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1809
1810  /* If there are no changes, just return the original memory reference.  */
1811  if (new == memref)
1812    {
1813      if (MEM_ATTRS (memref) == 0
1814	  || (MEM_EXPR (memref) == NULL
1815	      && MEM_OFFSET (memref) == NULL
1816	      && MEM_SIZE (memref) == size
1817	      && MEM_ALIGN (memref) == align))
1818	return new;
1819
1820      new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1821      MEM_COPY_ATTRIBUTES (new, memref);
1822    }
1823
1824  MEM_ATTRS (new)
1825    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1826
1827  return new;
1828}
1829
1830/* Return a memory reference like MEMREF, but with its mode changed
1831   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
1832   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS and the
   caller is responsible for adjusting the MEMREF base register.  */
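
/* Most callers use this through the adjust_address/adjust_address_nv
   wrappers.  For example, to fetch the second word of a DImode MEM on
   a 32-bit target (an illustrative sketch):

     rtx high = adjust_address (mem, SImode, 4);  */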
1835
1836rtx
1837adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1838		  int validate, int adjust)
1839{
1840  rtx addr = XEXP (memref, 0);
1841  rtx new;
1842  rtx memoffset = MEM_OFFSET (memref);
1843  rtx size = 0;
1844  unsigned int memalign = MEM_ALIGN (memref);
1845
1846  /* If there are no changes, just return the original memory reference.  */
1847  if (mode == GET_MODE (memref) && !offset
1848      && (!validate || memory_address_p (mode, addr)))
1849    return memref;
1850
  /* ??? Prefer to create garbage instead of creating shared rtl.
     Sharing can arise even if OFFSET is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
1854  addr = copy_rtx (addr);
1855
1856  if (adjust)
1857    {
1858      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1859	 object, we can merge it into the LO_SUM.  */
1860      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1861	  && offset >= 0
1862	  && (unsigned HOST_WIDE_INT) offset
1863	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1864	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1865			       plus_constant (XEXP (addr, 1), offset));
1866      else
1867	addr = plus_constant (addr, offset);
1868    }
1869
1870  new = change_address_1 (memref, mode, addr, validate);
1871
1872  /* Compute the new values of the memory attributes due to this adjustment.
1873     We add the offsets and update the alignment.  */
1874  if (memoffset)
1875    memoffset = GEN_INT (offset + INTVAL (memoffset));
1876
  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if
     OFFSET is zero.  */
1880  if (offset != 0)
1881    memalign
1882      = MIN (memalign,
1883	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1884
1885  /* We can compute the size in a number of ways.  */
1886  if (GET_MODE (new) != BLKmode)
1887    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1888  else if (MEM_SIZE (memref))
1889    size = plus_constant (MEM_SIZE (memref), -offset);
1890
1891  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1892				   memoffset, size, memalign, GET_MODE (new));
1893
1894  /* At some point, we should validate that this offset is within the object,
1895     if all the appropriate values are known.  */
1896  return new;
1897}
1898
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is nonzero, the memory
   address is forced to be valid.  */
1903
1904rtx
1905adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1906			     HOST_WIDE_INT offset, int validate)
1907{
1908  memref = change_address_1 (memref, VOIDmode, addr, validate);
1909  return adjust_address_1 (memref, mode, offset, validate, 0);
1910}
1911
1912/* Return a memory reference like MEMREF, but whose address is changed by
1913   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
1914   known to be in OFFSET (possibly 1).  */
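
/* For example, to address MEM plus a variable byte index IDX that is
   known to be a multiple of 4 (an illustrative sketch):

     rtx elt = offset_address (mem, idx, 4);  */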
1915
1916rtx
1917offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1918{
1919  rtx new, addr = XEXP (memref, 0);
1920
1921  new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1922
1923  /* At this point we don't know _why_ the address is invalid.  It
1924     could have secondary memory references, multiplies or anything.
1925
1926     However, if we did go and rearrange things, we can wind up not
1927     being able to recognize the magic around pic_offset_table_rtx.
1928     This stuff is fragile, and is yet another example of why it is
1929     bad to expose PIC machinery too early.  */
1930  if (! memory_address_p (GET_MODE (memref), new)
1931      && GET_CODE (addr) == PLUS
1932      && XEXP (addr, 0) == pic_offset_table_rtx)
1933    {
1934      addr = force_reg (GET_MODE (addr), addr);
1935      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1936    }
1937
1938  update_temp_slot_address (XEXP (memref, 0), new);
1939  new = change_address_1 (memref, VOIDmode, new, 1);
1940
1941  /* If there are no changes, just return the original memory reference.  */
1942  if (new == memref)
1943    return new;
1944
1945  /* Update the alignment to reflect the offset.  Reset the offset, which
1946     we don't know.  */
1947  MEM_ATTRS (new)
1948    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1949		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1950		     GET_MODE (new));
1951  return new;
1952}
1953
1954/* Return a memory reference like MEMREF, but with its address changed to
1955   ADDR.  The caller is asserting that the actual piece of memory pointed
1956   to is the same, just the form of the address is being changed, such as
1957   by putting something into a register.  */
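
/* For example, to force the address into a register while keeping all
   the memory attributes intact (an illustrative sketch):

     mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));  */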
1958
1959rtx
1960replace_equiv_address (rtx memref, rtx addr)
1961{
1962  /* change_address_1 copies the memory attribute structure without change
1963     and that's exactly what we want here.  */
1964  update_temp_slot_address (XEXP (memref, 0), addr);
1965  return change_address_1 (memref, VOIDmode, addr, 1);
1966}
1967
1968/* Likewise, but the reference is not required to be valid.  */
1969
1970rtx
1971replace_equiv_address_nv (rtx memref, rtx addr)
1972{
1973  return change_address_1 (memref, VOIDmode, addr, 0);
1974}
1975
1976/* Return a memory reference like MEMREF, but with its mode widened to
1977   MODE and offset by OFFSET.  This would be used by targets that e.g.
1978   cannot issue QImode memory operations and have to use SImode memory
1979   operations plus masking logic.  */
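
/* For example, a target without QImode loads could widen a byte access
   and extract the byte with shifts and masks afterwards (an
   illustrative sketch):

     rtx wide = widen_memory_access (byte_mem, SImode, 0);  */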
1980
1981rtx
1982widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1983{
1984  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1985  tree expr = MEM_EXPR (new);
1986  rtx memoffset = MEM_OFFSET (new);
1987  unsigned int size = GET_MODE_SIZE (mode);
1988
1989  /* If there are no changes, just return the original memory reference.  */
1990  if (new == memref)
1991    return new;
1992
1993  /* If we don't know what offset we were at within the expression, then
1994     we can't know if we've overstepped the bounds.  */
1995  if (! memoffset)
1996    expr = NULL_TREE;
1997
1998  while (expr)
1999    {
2000      if (TREE_CODE (expr) == COMPONENT_REF)
2001	{
2002	  tree field = TREE_OPERAND (expr, 1);
2003	  tree offset = component_ref_field_offset (expr);
2004
2005	  if (! DECL_SIZE_UNIT (field))
2006	    {
2007	      expr = NULL_TREE;
2008	      break;
2009	    }
2010
2011	  /* Is the field at least as large as the access?  If so, ok,
2012	     otherwise strip back to the containing structure.  */
2013	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2014	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2015	      && INTVAL (memoffset) >= 0)
2016	    break;
2017
2018	  if (! host_integerp (offset, 1))
2019	    {
2020	      expr = NULL_TREE;
2021	      break;
2022	    }
2023
2024	  expr = TREE_OPERAND (expr, 0);
2025	  memoffset
2026	    = (GEN_INT (INTVAL (memoffset)
2027			+ tree_low_cst (offset, 1)
2028			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2029			   / BITS_PER_UNIT)));
2030	}
2031      /* Similarly for the decl.  */
2032      else if (DECL_P (expr)
2033	       && DECL_SIZE_UNIT (expr)
2034	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2035	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2036	       && (! memoffset || INTVAL (memoffset) >= 0))
2037	break;
2038      else
2039	{
2040	  /* The widened memory access overflows the expression, which means
2041	     that it could alias another expression.  Zap it.  */
2042	  expr = NULL_TREE;
2043	  break;
2044	}
2045    }
2046
2047  if (! expr)
2048    memoffset = NULL_RTX;
2049
2050  /* The widened memory may alias other stuff, so zap the alias set.  */
2051  /* ??? Maybe use get_alias_set on any remaining expression.  */
2052
2053  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2054				   MEM_ALIGN (new), mode);
2055
2056  return new;
2057}
2058
2059/* Return a newly created CODE_LABEL rtx with a unique label number.  */
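
/* A typical use pairs the new label with a later emit_label call (an
   illustrative sketch):

     rtx label = gen_label_rtx ();
     ...
     emit_label (label);  */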
2060
2061rtx
2062gen_label_rtx (void)
2063{
2064  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2065			     NULL, label_num++, NULL);
2066}
2067
2068/* For procedure integration.  */
2069
2070/* Install new pointers to the first and last insns in the chain.
2071   Also, set cur_insn_uid to one higher than the last in use.
2072   Used for an inline-procedure after copying the insn chain.  */
2073
2074void
2075set_new_first_and_last_insn (rtx first, rtx last)
2076{
2077  rtx insn;
2078
2079  first_insn = first;
2080  last_insn = last;
2081  cur_insn_uid = 0;
2082
2083  for (insn = first; insn; insn = NEXT_INSN (insn))
2084    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2085
2086  cur_insn_uid++;
2087}
2088
2089/* Go through all the RTL insn bodies and copy any invalid shared
2090   structure.  This routine should only be called once.  */
2091
2092static void
2093unshare_all_rtl_1 (tree fndecl, rtx insn)
2094{
2095  tree decl;
2096
2097  /* Make sure that virtual parameters are not shared.  */
2098  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2099    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2100
2101  /* Make sure that virtual stack slots are not shared.  */
2102  unshare_all_decls (DECL_INITIAL (fndecl));
2103
2104  /* Unshare just about everything else.  */
2105  unshare_all_rtl_in_chain (insn);
2106
2107  /* Make sure the addresses of stack slots found outside the insn chain
2108     (such as, in DECL_RTL of a variable) are not shared
2109     with the insn chain.
2110
2111     This special care is necessary when the stack slot MEM does not
2112     actually appear in the insn chain.  If it does appear, its address
2113     is unshared from all else at that point.  */
2114  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2115}
2116
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do, so it
   should be done sparingly.  */
2120
2121void
2122unshare_all_rtl_again (rtx insn)
2123{
2124  rtx p;
2125  tree decl;
2126
2127  for (p = insn; p; p = NEXT_INSN (p))
2128    if (INSN_P (p))
2129      {
2130	reset_used_flags (PATTERN (p));
2131	reset_used_flags (REG_NOTES (p));
2132	reset_used_flags (LOG_LINKS (p));
2133      }
2134
2135  /* Make sure that virtual stack slots are not shared.  */
2136  reset_used_decls (DECL_INITIAL (cfun->decl));
2137
2138  /* Make sure that virtual parameters are not shared.  */
2139  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2140    reset_used_flags (DECL_RTL (decl));
2141
2142  reset_used_flags (stack_slot_list);
2143
2144  unshare_all_rtl_1 (cfun->decl, insn);
2145}
2146
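/* Unshare all RTL in the current function; the entry point used by
   pass_unshare_all_rtl below.  */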
2147unsigned int
2148unshare_all_rtl (void)
2149{
2150  unshare_all_rtl_1 (current_function_decl, get_insns ());
2151  return 0;
2152}
2153
2154struct tree_opt_pass pass_unshare_all_rtl =
2155{
2156  "unshare",                            /* name */
2157  NULL,                                 /* gate */
2158  unshare_all_rtl,                      /* execute */
2159  NULL,                                 /* sub */
2160  NULL,                                 /* next */
2161  0,                                    /* static_pass_number */
2162  0,                                    /* tv_id */
2163  0,                                    /* properties_required */
2164  0,                                    /* properties_provided */
2165  0,                                    /* properties_destroyed */
2166  0,                                    /* todo_flags_start */
2167  TODO_dump_func,                       /* todo_flags_finish */
2168  0                                     /* letter */
2169};
2170
2171
/* Check that ORIG is not marked when it should not be, and mark ORIG
   as in use.  Recursively do the same for subexpressions.  */
2174
2175static void
2176verify_rtx_sharing (rtx orig, rtx insn)
2177{
2178  rtx x = orig;
2179  int i;
2180  enum rtx_code code;
2181  const char *format_ptr;
2182
2183  if (x == 0)
2184    return;
2185
2186  code = GET_CODE (x);
2187
2188  /* These types may be freely shared.  */
2189
2190  switch (code)
2191    {
2192    case REG:
2193    case CONST_INT:
2194    case CONST_DOUBLE:
2195    case CONST_VECTOR:
2196    case SYMBOL_REF:
2197    case LABEL_REF:
2198    case CODE_LABEL:
2199    case PC:
2200    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
	 value.  */
      return;
2204    case CLOBBER:
2205      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2206	return;
2207      break;
2208
2209    case CONST:
2210      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2211	 a LABEL_REF, it isn't sharable.  */
2212      if (GET_CODE (XEXP (x, 0)) == PLUS
2213	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2214	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2215	return;
2216      break;
2217
2218    case MEM:
2219      /* A MEM is allowed to be shared if its address is constant.  */
2220      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2221	  || reload_completed || reload_in_progress)
2222	return;
2223
2224      break;
2225
2226    default:
2227      break;
2228    }
2229
  /* This rtx may not be shared.  If it has already been seen,
     report an invalid rtl-sharing error.  */
2232#ifdef ENABLE_CHECKING
2233  if (RTX_FLAG (x, used))
2234    {
2235      error ("invalid rtl sharing found in the insn");
2236      debug_rtx (insn);
2237      error ("shared rtx");
2238      debug_rtx (x);
2239      internal_error ("internal consistency failure");
2240    }
2241#endif
2242  gcc_assert (!RTX_FLAG (x, used));
2243
2244  RTX_FLAG (x, used) = 1;
2245
2246  /* Now scan the subexpressions recursively.  */
2247
2248  format_ptr = GET_RTX_FORMAT (code);
2249
2250  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2251    {
2252      switch (*format_ptr++)
2253	{
2254	case 'e':
2255	  verify_rtx_sharing (XEXP (x, i), insn);
2256	  break;
2257
2258	case 'E':
2259	  if (XVEC (x, i) != NULL)
2260	    {
2261	      int j;
2262	      int len = XVECLEN (x, i);
2263
2264	      for (j = 0; j < len; j++)
2265		{
		  /* We allow sharing of ASM_OPERANDS inside a single
		     instruction.  */
2268		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2269		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2270			  == ASM_OPERANDS))
2271		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2272		  else
2273		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2274		}
2275	    }
2276	  break;
2277	}
2278    }
2279  return;
2280}
2281
2282/* Go through all the RTL insn bodies and check that there is no unexpected
2283   sharing in between the subexpressions.  */
2284
2285void
2286verify_rtl_sharing (void)
2287{
2288  rtx p;
2289
2290  for (p = get_insns (); p; p = NEXT_INSN (p))
2291    if (INSN_P (p))
2292      {
2293	reset_used_flags (PATTERN (p));
2294	reset_used_flags (REG_NOTES (p));
2295	reset_used_flags (LOG_LINKS (p));
2296      }
2297
2298  for (p = get_insns (); p; p = NEXT_INSN (p))
2299    if (INSN_P (p))
2300      {
2301	verify_rtx_sharing (PATTERN (p), p);
2302	verify_rtx_sharing (REG_NOTES (p), p);
2303	verify_rtx_sharing (LOG_LINKS (p), p);
2304      }
2305}
2306
2307/* Go through all the RTL insn bodies and copy any invalid shared structure.
2308   Assumes the mark bits are cleared at entry.  */
2309
2310void
2311unshare_all_rtl_in_chain (rtx insn)
2312{
2313  for (; insn; insn = NEXT_INSN (insn))
2314    if (INSN_P (insn))
2315      {
2316	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2317	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2318	LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2319      }
2320}
2321
2322/* Go through all virtual stack slots of a function and copy any
2323   shared structure.  */
2324static void
2325unshare_all_decls (tree blk)
2326{
2327  tree t;
2328
2329  /* Copy shared decls.  */
2330  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2331    if (DECL_RTL_SET_P (t))
2332      SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2333
2334  /* Now process sub-blocks.  */
2335  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2336    unshare_all_decls (t);
2337}
2338
2339/* Go through all virtual stack slots of a function and mark them as
2340   not shared.  */
2341static void
2342reset_used_decls (tree blk)
2343{
2344  tree t;
2345
2346  /* Mark decls.  */
2347  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2348    if (DECL_RTL_SET_P (t))
2349      reset_used_flags (DECL_RTL (t));
2350
2351  /* Now process sub-blocks.  */
2352  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2353    reset_used_decls (t);
2354}
2355
2356/* Mark ORIG as in use, and return a copy of it if it was already in use.
2357   Recursively does the same for subexpressions.  Uses
2358   copy_rtx_if_shared_1 to reduce stack space.  */
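
/* For example, to unshare one insn's pattern after the used flags have
   been reset, e.g. by reset_used_flags (an illustrative sketch):

     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */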
2359
2360rtx
2361copy_rtx_if_shared (rtx orig)
2362{
2363  copy_rtx_if_shared_1 (&orig);
2364  return orig;
2365}
2366
2367/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2368   use.  Recursively does the same for subexpressions.  */
2369
2370static void
2371copy_rtx_if_shared_1 (rtx *orig1)
2372{
2373  rtx x;
2374  int i;
2375  enum rtx_code code;
2376  rtx *last_ptr;
2377  const char *format_ptr;
2378  int copied = 0;
2379  int length;
2380
2381  /* Repeat is used to turn tail-recursion into iteration.  */
2382repeat:
2383  x = *orig1;
2384
2385  if (x == 0)
2386    return;
2387
2388  code = GET_CODE (x);
2389
2390  /* These types may be freely shared.  */
2391
2392  switch (code)
2393    {
2394    case REG:
2395    case CONST_INT:
2396    case CONST_DOUBLE:
2397    case CONST_VECTOR:
2398    case SYMBOL_REF:
2399    case LABEL_REF:
2400    case CODE_LABEL:
2401    case PC:
2402    case CC0:
2403    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
	 value.  */
2405      return;
2406    case CLOBBER:
2407      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2408	return;
2409      break;
2410
2411    case CONST:
2412      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2413	 a LABEL_REF, it isn't sharable.  */
2414      if (GET_CODE (XEXP (x, 0)) == PLUS
2415	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2416	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2417	return;
2418      break;
2419
2420    case INSN:
2421    case JUMP_INSN:
2422    case CALL_INSN:
2423    case NOTE:
2424    case BARRIER:
2425      /* The chain of insns is not being copied.  */
2426      return;
2427
2428    default:
2429      break;
2430    }
2431
2432  /* This rtx may not be shared.  If it has already been seen,
2433     replace it with a copy of itself.  */
2434
2435  if (RTX_FLAG (x, used))
2436    {
2437      x = shallow_copy_rtx (x);
2438      copied = 1;
2439    }
2440  RTX_FLAG (x, used) = 1;
2441
2442  /* Now scan the subexpressions recursively.
2443     We can store any replaced subexpressions directly into X
2444     since we know X is not shared!  Any vectors in X
2445     must be copied if X was copied.  */
2446
2447  format_ptr = GET_RTX_FORMAT (code);
2448  length = GET_RTX_LENGTH (code);
2449  last_ptr = NULL;
2450
2451  for (i = 0; i < length; i++)
2452    {
2453      switch (*format_ptr++)
2454	{
2455	case 'e':
2456          if (last_ptr)
2457            copy_rtx_if_shared_1 (last_ptr);
2458	  last_ptr = &XEXP (x, i);
2459	  break;
2460
2461	case 'E':
2462	  if (XVEC (x, i) != NULL)
2463	    {
2464	      int j;
2465	      int len = XVECLEN (x, i);
2466
              /* Copy the vector iff we copied the rtx and the length
		 is nonzero.  */
2469	      if (copied && len > 0)
2470		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2471
              /* Recurse on every element of the vector.  */
2473	      for (j = 0; j < len; j++)
2474                {
2475		  if (last_ptr)
2476		    copy_rtx_if_shared_1 (last_ptr);
2477                  last_ptr = &XVECEXP (x, i, j);
2478                }
2479	    }
2480	  break;
2481	}
2482    }
2483  *orig1 = x;
2484  if (last_ptr)
2485    {
2486      orig1 = last_ptr;
2487      goto repeat;
2488    }
2489  return;
2490}
2491
2492/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2493   to look for shared sub-parts.  */
2494
2495void
2496reset_used_flags (rtx x)
2497{
2498  int i, j;
2499  enum rtx_code code;
2500  const char *format_ptr;
2501  int length;
2502
2503  /* Repeat is used to turn tail-recursion into iteration.  */
2504repeat:
2505  if (x == 0)
2506    return;
2507
2508  code = GET_CODE (x);
2509
2510  /* These types may be freely shared so we needn't do any resetting
2511     for them.  */
2512
2513  switch (code)
2514    {
2515    case REG:
2516    case CONST_INT:
2517    case CONST_DOUBLE:
2518    case CONST_VECTOR:
2519    case SYMBOL_REF:
2520    case CODE_LABEL:
2521    case PC:
2522    case CC0:
2523      return;
2524
2525    case INSN:
2526    case JUMP_INSN:
2527    case CALL_INSN:
2528    case NOTE:
2529    case LABEL_REF:
2530    case BARRIER:
2531      /* The chain of insns is not being copied.  */
2532      return;
2533
2534    default:
2535      break;
2536    }
2537
2538  RTX_FLAG (x, used) = 0;
2539
2540  format_ptr = GET_RTX_FORMAT (code);
2541  length = GET_RTX_LENGTH (code);
2542
2543  for (i = 0; i < length; i++)
2544    {
2545      switch (*format_ptr++)
2546	{
2547	case 'e':
          if (i == length - 1)
2549            {
2550              x = XEXP (x, i);
2551	      goto repeat;
2552            }
2553	  reset_used_flags (XEXP (x, i));
2554	  break;
2555
2556	case 'E':
2557	  for (j = 0; j < XVECLEN (x, i); j++)
2558	    reset_used_flags (XVECEXP (x, i, j));
2559	  break;
2560	}
2561    }
2562}
2563
2564/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2565   to look for shared sub-parts.  */
2566
2567void
2568set_used_flags (rtx x)
2569{
2570  int i, j;
2571  enum rtx_code code;
2572  const char *format_ptr;
2573
2574  if (x == 0)
2575    return;
2576
2577  code = GET_CODE (x);
2578
2579  /* These types may be freely shared so we needn't do any resetting
2580     for them.  */
2581
2582  switch (code)
2583    {
2584    case REG:
2585    case CONST_INT:
2586    case CONST_DOUBLE:
2587    case CONST_VECTOR:
2588    case SYMBOL_REF:
2589    case CODE_LABEL:
2590    case PC:
2591    case CC0:
2592      return;
2593
2594    case INSN:
2595    case JUMP_INSN:
2596    case CALL_INSN:
2597    case NOTE:
2598    case LABEL_REF:
2599    case BARRIER:
2600      /* The chain of insns is not being copied.  */
2601      return;
2602
2603    default:
2604      break;
2605    }
2606
2607  RTX_FLAG (x, used) = 1;
2608
2609  format_ptr = GET_RTX_FORMAT (code);
2610  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2611    {
2612      switch (*format_ptr++)
2613	{
2614	case 'e':
2615	  set_used_flags (XEXP (x, i));
2616	  break;
2617
2618	case 'E':
2619	  for (j = 0; j < XVECLEN (x, i); j++)
2620	    set_used_flags (XVECEXP (x, i, j));
2621	  break;
2622	}
2623    }
2624}
2625
2626/* Copy X if necessary so that it won't be altered by changes in OTHER.
2627   Return X or the rtx for the pseudo reg the value of X was copied into.
2628   OTHER must be valid as a SET_DEST.  */
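
/* For example, before emitting code that stores into TARGET while the
   value of X is still needed (an illustrative sketch):

     x = make_safe_from (x, target);

   ensures that subsequent changes to TARGET cannot alter X.  */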
2629
2630rtx
2631make_safe_from (rtx x, rtx other)
2632{
2633  while (1)
2634    switch (GET_CODE (other))
2635      {
2636      case SUBREG:
2637	other = SUBREG_REG (other);
2638	break;
2639      case STRICT_LOW_PART:
2640      case SIGN_EXTEND:
2641      case ZERO_EXTEND:
2642	other = XEXP (other, 0);
2643	break;
2644      default:
2645	goto done;
2646      }
2647 done:
2648  if ((MEM_P (other)
2649       && ! CONSTANT_P (x)
2650       && !REG_P (x)
2651       && GET_CODE (x) != SUBREG)
2652      || (REG_P (other)
2653	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
2654	      || reg_mentioned_p (other, x))))
2655    {
2656      rtx temp = gen_reg_rtx (GET_MODE (x));
2657      emit_move_insn (temp, x);
2658      return temp;
2659    }
2660  return x;
2661}
2662
2663/* Emission of insns (adding them to the doubly-linked list).  */
2664
2665/* Return the first insn of the current sequence or current function.  */
2666
2667rtx
2668get_insns (void)
2669{
2670  return first_insn;
2671}
2672
2673/* Specify a new insn as the first in the chain.  */
2674
2675void
2676set_first_insn (rtx insn)
2677{
2678  gcc_assert (!PREV_INSN (insn));
2679  first_insn = insn;
2680}
2681
2682/* Return the last insn emitted in current sequence or current function.  */
2683
2684rtx
2685get_last_insn (void)
2686{
2687  return last_insn;
2688}
2689
2690/* Specify a new insn as the last in the chain.  */
2691
2692void
2693set_last_insn (rtx insn)
2694{
2695  gcc_assert (!NEXT_INSN (insn));
2696  last_insn = insn;
2697}
2698
2699/* Return the last insn emitted, even if it is in a sequence now pushed.  */
2700
2701rtx
2702get_last_insn_anywhere (void)
2703{
2704  struct sequence_stack *stack;
2705  if (last_insn)
2706    return last_insn;
2707  for (stack = seq_stack; stack; stack = stack->next)
2708    if (stack->last != 0)
2709      return stack->last;
2710  return 0;
2711}
2712
2713/* Return the first nonnote insn emitted in current sequence or current
2714   function.  This routine looks inside SEQUENCEs.  */
2715
2716rtx
2717get_first_nonnote_insn (void)
2718{
2719  rtx insn = first_insn;
2720
2721  if (insn)
2722    {
2723      if (NOTE_P (insn))
2724	for (insn = next_insn (insn);
2725	     insn && NOTE_P (insn);
2726	     insn = next_insn (insn))
2727	  continue;
2728      else
2729	{
2730	  if (NONJUMP_INSN_P (insn)
2731	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2732	    insn = XVECEXP (PATTERN (insn), 0, 0);
2733	}
2734    }
2735
2736  return insn;
2737}
2738
2739/* Return the last nonnote insn emitted in current sequence or current
2740   function.  This routine looks inside SEQUENCEs.  */
2741
2742rtx
2743get_last_nonnote_insn (void)
2744{
2745  rtx insn = last_insn;
2746
2747  if (insn)
2748    {
2749      if (NOTE_P (insn))
2750	for (insn = previous_insn (insn);
2751	     insn && NOTE_P (insn);
2752	     insn = previous_insn (insn))
2753	  continue;
2754      else
2755	{
2756	  if (NONJUMP_INSN_P (insn)
2757	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2758	    insn = XVECEXP (PATTERN (insn), 0,
2759			    XVECLEN (PATTERN (insn), 0) - 1);
2760	}
2761    }
2762
2763  return insn;
2764}
2765
2766/* Return a number larger than any instruction's uid in this function.  */
2767
2768int
2769get_max_uid (void)
2770{
2771  return cur_insn_uid;
2772}
2773
2774/* Renumber instructions so that no instruction UIDs are wasted.  */
2775
2776void
2777renumber_insns (void)
2778{
2779  rtx insn;
2780
2781  /* If we're not supposed to renumber instructions, don't.  */
2782  if (!flag_renumber_insns)
2783    return;
2784
2785  /* If there aren't that many instructions, then it's not really
2786     worth renumbering them.  */
2787  if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2788    return;
2789
2790  cur_insn_uid = 1;
2791
2792  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2793    {
2794      if (dump_file)
2795	fprintf (dump_file, "Renumbering insn %d to %d\n",
2796		 INSN_UID (insn), cur_insn_uid);
2797      INSN_UID (insn) = cur_insn_uid++;
2798    }
2799}
2800
2801/* Return the next insn.  If it is a SEQUENCE, return the first insn
2802   of the sequence.  */
2803
2804rtx
2805next_insn (rtx insn)
2806{
2807  if (insn)
2808    {
2809      insn = NEXT_INSN (insn);
2810      if (insn && NONJUMP_INSN_P (insn)
2811	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
2812	insn = XVECEXP (PATTERN (insn), 0, 0);
2813    }
2814
2815  return insn;
2816}
2817
2818/* Return the previous insn.  If it is a SEQUENCE, return the last insn
2819   of the sequence.  */
2820
2821rtx
2822previous_insn (rtx insn)
2823{
2824  if (insn)
2825    {
2826      insn = PREV_INSN (insn);
2827      if (insn && NONJUMP_INSN_P (insn)
2828	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
2829	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2830    }
2831
2832  return insn;
2833}
2834
2835/* Return the next insn after INSN that is not a NOTE.  This routine does not
2836   look inside SEQUENCEs.  */
2837
2838rtx
2839next_nonnote_insn (rtx insn)
2840{
2841  while (insn)
2842    {
2843      insn = NEXT_INSN (insn);
2844      if (insn == 0 || !NOTE_P (insn))
2845	break;
2846    }
2847
2848  return insn;
2849}
2850
2851/* Return the previous insn before INSN that is not a NOTE.  This routine does
2852   not look inside SEQUENCEs.  */
2853
2854rtx
2855prev_nonnote_insn (rtx insn)
2856{
2857  while (insn)
2858    {
2859      insn = PREV_INSN (insn);
2860      if (insn == 0 || !NOTE_P (insn))
2861	break;
2862    }
2863
2864  return insn;
2865}
2866
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN, or 0 if
   there is none.  This routine does not look inside SEQUENCEs.  */
2870
2871rtx
2872next_real_insn (rtx insn)
2873{
2874  while (insn)
2875    {
2876      insn = NEXT_INSN (insn);
2877      if (insn == 0 || INSN_P (insn))
2878	break;
2879    }
2880
2881  return insn;
2882}
2883
/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN, or 0 if
   there is none.  This routine does not look inside SEQUENCEs.  */
2887
2888rtx
2889prev_real_insn (rtx insn)
2890{
2891  while (insn)
2892    {
2893      insn = PREV_INSN (insn);
2894      if (insn == 0 || INSN_P (insn))
2895	break;
2896    }
2897
2898  return insn;
2899}
2900
2901/* Return the last CALL_INSN in the current list, or 0 if there is none.
2902   This routine does not look inside SEQUENCEs.  */
2903
2904rtx
2905last_call_insn (void)
2906{
2907  rtx insn;
2908
2909  for (insn = get_last_insn ();
2910       insn && !CALL_P (insn);
2911       insn = PREV_INSN (insn))
2912    ;
2913
2914  return insn;
2915}
2916
/* Return nonzero if INSN is an insn that really does something: a
   CALL_INSN or JUMP_INSN always qualifies, and so does any other INSN
   until reload has completed, after which USE and CLOBBER patterns no
   longer count.  */

int
active_insn_p (rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */

2931rtx
2932next_active_insn (rtx insn)
2933{
2934  while (insn)
2935    {
2936      insn = NEXT_INSN (insn);
2937      if (insn == 0 || active_insn_p (insn))
2938	break;
2939    }
2940
2941  return insn;
2942}
2943
2944/* Find the last insn before INSN that really does something.  This routine
2945   does not look inside SEQUENCEs.  Until reload has completed, this is the
2946   same as prev_real_insn.  */
2947
2948rtx
2949prev_active_insn (rtx insn)
2950{
2951  while (insn)
2952    {
2953      insn = PREV_INSN (insn);
2954      if (insn == 0 || active_insn_p (insn))
2955	break;
2956    }
2957
2958  return insn;
2959}
2960
2961/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
2962
2963rtx
2964next_label (rtx insn)
2965{
2966  while (insn)
2967    {
2968      insn = NEXT_INSN (insn);
2969      if (insn == 0 || LABEL_P (insn))
2970	break;
2971    }
2972
2973  return insn;
2974}
2975
2976/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */
2977
2978rtx
2979prev_label (rtx insn)
2980{
2981  while (insn)
2982    {
2983      insn = PREV_INSN (insn);
2984      if (insn == 0 || LABEL_P (insn))
2985	break;
2986    }
2987
2988  return insn;
2989}
2990
2991/* Return the last label to mark the same position as LABEL.  Return null
2992   if LABEL itself is null.  */
2993
2994rtx
2995skip_consecutive_labels (rtx label)
2996{
2997  rtx insn;
2998
2999  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3000    if (LABEL_P (insn))
3001      label = insn;
3002
3003  return label;
3004}
3005
3006#ifdef HAVE_cc0
3007/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3008   and REG_CC_USER notes so we can find it.  */
3009
3010void
3011link_cc0_insns (rtx insn)
3012{
3013  rtx user = next_nonnote_insn (insn);
3014
3015  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3016    user = XVECEXP (PATTERN (user), 0, 0);
3017
3018  REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3019					REG_NOTES (user));
3020  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3021}
3022
3023/* Return the next insn that uses CC0 after INSN, which is assumed to
3024   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3025   applied to the result of this function should yield INSN).
3026
3027   Normally, this is simply the next insn.  However, if a REG_CC_USER note
3028   is present, it contains the insn that uses CC0.
3029
3030   Return 0 if we can't find the insn.  */
3031
3032rtx
3033next_cc0_user (rtx insn)
3034{
3035  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3036
3037  if (note)
3038    return XEXP (note, 0);
3039
3040  insn = next_nonnote_insn (insn);
3041  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3042    insn = XVECEXP (PATTERN (insn), 0, 0);
3043
3044  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3045    return insn;
3046
3047  return 0;
3048}
3049
3050/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3051   note, it is the previous insn.  */
3052
3053rtx
3054prev_cc0_setter (rtx insn)
3055{
3056  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3057
3058  if (note)
3059    return XEXP (note, 0);
3060
3061  insn = prev_nonnote_insn (insn);
3062  gcc_assert (sets_cc0_p (PATTERN (insn)));
3063
3064  return insn;
3065}
3066#endif
3067
/* Increment the use count of every label referenced inside X.  */
3069
3070static void
3071mark_label_nuses (rtx x)
3072{
3073  enum rtx_code code;
3074  int i, j;
3075  const char *fmt;
3076
3077  code = GET_CODE (x);
3078  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3079    LABEL_NUSES (XEXP (x, 0))++;
3080
3081  fmt = GET_RTX_FORMAT (code);
3082  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3083    {
3084      if (fmt[i] == 'e')
3085	mark_label_nuses (XEXP (x, i));
3086      else if (fmt[i] == 'E')
3087	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3088	  mark_label_nuses (XVECEXP (x, i, j));
3089    }
3090}
3091
3092
3093/* Try splitting insns that can be split for better scheduling.
3094   PAT is the pattern which might split.
3095   TRIAL is the insn providing PAT.
3096   LAST is nonzero if we should return the last insn of the sequence produced.
3097
3098   If this routine succeeds in splitting, it returns the first or last
3099   replacement insn depending on the value of LAST.  Otherwise, it
3100   returns TRIAL.  If the insn to be returned can be split, it will be.  */
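
/* A typical caller splits an insn in place and resumes scanning after
   the last replacement insn (an illustrative sketch):

     insn = try_split (PATTERN (insn), insn, 1);  */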
3101
3102rtx
3103try_split (rtx pat, rtx trial, int last)
3104{
3105  rtx before = PREV_INSN (trial);
3106  rtx after = NEXT_INSN (trial);
3107  int has_barrier = 0;
3108  rtx tem;
3109  rtx note, seq;
3110  int probability;
3111  rtx insn_last, insn;
3112  int njumps = 0;
3113
3114  if (any_condjump_p (trial)
3115      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3116    split_branch_probability = INTVAL (XEXP (note, 0));
3117  probability = split_branch_probability;
3118
3119  seq = split_insns (pat, trial);
3120
3121  split_branch_probability = -1;
3122
3123  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3124     We may need to handle this specially.  */
3125  if (after && BARRIER_P (after))
3126    {
3127      has_barrier = 1;
3128      after = NEXT_INSN (after);
3129    }
3130
3131  if (!seq)
3132    return trial;
3133
3134  /* Avoid infinite loop if any insn of the result matches
3135     the original pattern.  */
3136  insn_last = seq;
3137  while (1)
3138    {
3139      if (INSN_P (insn_last)
3140	  && rtx_equal_p (PATTERN (insn_last), pat))
3141	return trial;
3142      if (!NEXT_INSN (insn_last))
3143	break;
3144      insn_last = NEXT_INSN (insn_last);
3145    }
3146
3147  /* Mark labels.  */
3148  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3149    {
3150      if (JUMP_P (insn))
3151	{
3152	  mark_jump_label (PATTERN (insn), insn, 0);
3153	  njumps++;
3154	  if (probability != -1
3155	      && any_condjump_p (insn)
3156	      && !find_reg_note (insn, REG_BR_PROB, 0))
3157	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created; otherwise the machine description
		 is responsible for this step, using the
		 split_branch_probability variable.  */
3162	      gcc_assert (njumps == 1);
3163	      REG_NOTES (insn)
3164		= gen_rtx_EXPR_LIST (REG_BR_PROB,
3165				     GEN_INT (probability),
3166				     REG_NOTES (insn));
3167	    }
3168	}
3169    }
3170
3171  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3172     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
3173  if (CALL_P (trial))
3174    {
3175      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3176	if (CALL_P (insn))
3177	  {
3178	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3179	    while (*p)
3180	      p = &XEXP (*p, 1);
3181	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3182	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3183	  }
3184    }
3185
3186  /* Copy notes, particularly those related to the CFG.  */
3187  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3188    {
3189      switch (REG_NOTE_KIND (note))
3190	{
3191	case REG_EH_REGION:
3192	  insn = insn_last;
3193	  while (insn != NULL_RTX)
3194	    {
3195	      if (CALL_P (insn)
3196		  || (flag_non_call_exceptions && INSN_P (insn)
3197		      && may_trap_p (PATTERN (insn))))
3198		REG_NOTES (insn)
3199		  = gen_rtx_EXPR_LIST (REG_EH_REGION,
3200				       XEXP (note, 0),
3201				       REG_NOTES (insn));
3202	      insn = PREV_INSN (insn);
3203	    }
3204	  break;
3205
3206	case REG_NORETURN:
3207	case REG_SETJMP:
3208	  insn = insn_last;
3209	  while (insn != NULL_RTX)
3210	    {
3211	      if (CALL_P (insn))
3212		REG_NOTES (insn)
3213		  = gen_rtx_EXPR_LIST (GET_MODE (note),
3214				       XEXP (note, 0),
3215				       REG_NOTES (insn));
3216	      insn = PREV_INSN (insn);
3217	    }
3218	  break;
3219
3220	case REG_NON_LOCAL_GOTO:
3221	  insn = insn_last;
3222	  while (insn != NULL_RTX)
3223	    {
3224	      if (JUMP_P (insn))
3225		REG_NOTES (insn)
3226		  = gen_rtx_EXPR_LIST (GET_MODE (note),
3227				       XEXP (note, 0),
3228				       REG_NOTES (insn));
3229	      insn = PREV_INSN (insn);
3230	    }
3231	  break;
3232
3233	default:
3234	  break;
3235	}
3236    }
3237
  /* If there are LABELS inside the split insns, increment the
     usage count so we don't delete the label.  */
3240  if (NONJUMP_INSN_P (trial))
3241    {
3242      insn = insn_last;
3243      while (insn != NULL_RTX)
3244	{
3245	  if (NONJUMP_INSN_P (insn))
3246	    mark_label_nuses (PATTERN (insn));
3247
3248	  insn = PREV_INSN (insn);
3249	}
3250    }
3251
3252  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3253
3254  delete_insn (trial);
3255  if (has_barrier)
3256    emit_barrier_after (tem);
3257
3258  /* Recursively call try_split for each new insn created; by the
3259     time control returns here that insn will be fully split, so
3260     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if we are not optimizing.  */
3263  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3264    if (! INSN_DELETED_P (tem) && INSN_P (tem))
3265      tem = try_split (PATTERN (tem), tem, 1);
3266
3267  /* Return either the first or the last insn, depending on which was
3268     requested.  */
3269  return last
3270    ? (after ? PREV_INSN (after) : last_insn)
3271    : NEXT_INSN (before);
3272}
3273
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slot.  */
3276
3277rtx
3278make_insn_raw (rtx pattern)
3279{
3280  rtx insn;
3281
3282  insn = rtx_alloc (INSN);
3283
3284  INSN_UID (insn) = cur_insn_uid++;
3285  PATTERN (insn) = pattern;
3286  INSN_CODE (insn) = -1;
3287  LOG_LINKS (insn) = NULL;
3288  REG_NOTES (insn) = NULL;
3289  INSN_LOCATOR (insn) = 0;
3290  BLOCK_FOR_INSN (insn) = NULL;
3291
3292#ifdef ENABLE_RTL_CHECKING
3293  if (insn
3294      && INSN_P (insn)
3295      && (returnjump_p (insn)
3296	  || (GET_CODE (insn) == SET
3297	      && SET_DEST (insn) == pc_rtx)))
3298    {
3299      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3300      debug_rtx (insn);
3301    }
3302#endif
3303
3304  return insn;
3305}
3306
3307/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3308
3309rtx
3310make_jump_insn_raw (rtx pattern)
3311{
3312  rtx insn;
3313
3314  insn = rtx_alloc (JUMP_INSN);
3315  INSN_UID (insn) = cur_insn_uid++;
3316
3317  PATTERN (insn) = pattern;
3318  INSN_CODE (insn) = -1;
3319  LOG_LINKS (insn) = NULL;
3320  REG_NOTES (insn) = NULL;
3321  JUMP_LABEL (insn) = NULL;
3322  INSN_LOCATOR (insn) = 0;
3323  BLOCK_FOR_INSN (insn) = NULL;
3324
3325  return insn;
3326}
3327
3328/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3329
3330static rtx
3331make_call_insn_raw (rtx pattern)
3332{
3333  rtx insn;
3334
3335  insn = rtx_alloc (CALL_INSN);
3336  INSN_UID (insn) = cur_insn_uid++;
3337
3338  PATTERN (insn) = pattern;
3339  INSN_CODE (insn) = -1;
3340  LOG_LINKS (insn) = NULL;
3341  REG_NOTES (insn) = NULL;
3342  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3343  INSN_LOCATOR (insn) = 0;
3344  BLOCK_FOR_INSN (insn) = NULL;
3345
3346  return insn;
3347}
3348
3349/* Add INSN to the end of the doubly-linked list.
3350   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3351
3352void
3353add_insn (rtx insn)
3354{
3355  PREV_INSN (insn) = last_insn;
3356  NEXT_INSN (insn) = 0;
3357
3358  if (NULL != last_insn)
3359    NEXT_INSN (last_insn) = insn;
3360
3361  if (NULL == first_insn)
3362    first_insn = insn;
3363
3364  last_insn = insn;
3365}
3366
/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled, since only they know how to update a
   SEQUENCE.  */
3371
3372void
3373add_insn_after (rtx insn, rtx after)
3374{
3375  rtx next = NEXT_INSN (after);
3376  basic_block bb;
3377
3378  gcc_assert (!optimize || !INSN_DELETED_P (after));
3379
3380  NEXT_INSN (insn) = next;
3381  PREV_INSN (insn) = after;
3382
3383  if (next)
3384    {
3385      PREV_INSN (next) = insn;
3386      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3387	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3388    }
3389  else if (last_insn == after)
3390    last_insn = insn;
3391  else
3392    {
3393      struct sequence_stack *stack = seq_stack;
3394      /* Scan all pending sequences too.  */
3395      for (; stack; stack = stack->next)
3396	if (after == stack->last)
3397	  {
3398	    stack->last = insn;
3399	    break;
3400	  }
3401
3402      gcc_assert (stack);
3403    }
3404
3405  if (!BARRIER_P (after)
3406      && !BARRIER_P (insn)
3407      && (bb = BLOCK_FOR_INSN (after)))
3408    {
3409      set_block_for_insn (insn, bb);
3410      if (INSN_P (insn))
3411	bb->flags |= BB_DIRTY;
      /* This should not happen, as the first insn in the BB is always
	 either a NOTE or a LABEL.  */
3414      if (BB_END (bb) == after
3415	  /* Avoid clobbering of structure when creating new BB.  */
3416	  && !BARRIER_P (insn)
3417	  && (!NOTE_P (insn)
3418	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3419	BB_END (bb) = insn;
3420    }
3421
3422  NEXT_INSN (after) = insn;
3423  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3424    {
3425      rtx sequence = PATTERN (after);
3426      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3427    }
3428}
3429
/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn once
   delay slots have been filled, since only they know how to update a
   SEQUENCE.  */
3434
3435void
3436add_insn_before (rtx insn, rtx before)
3437{
3438  rtx prev = PREV_INSN (before);
3439  basic_block bb;
3440
3441  gcc_assert (!optimize || !INSN_DELETED_P (before));
3442
3443  PREV_INSN (insn) = prev;
3444  NEXT_INSN (insn) = before;
3445
3446  if (prev)
3447    {
3448      NEXT_INSN (prev) = insn;
3449      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3450	{
3451	  rtx sequence = PATTERN (prev);
3452	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3453	}
3454    }
3455  else if (first_insn == before)
3456    first_insn = insn;
3457  else
3458    {
3459      struct sequence_stack *stack = seq_stack;
3460      /* Scan all pending sequences too.  */
3461      for (; stack; stack = stack->next)
3462	if (before == stack->first)
3463	  {
3464	    stack->first = insn;
3465	    break;
3466	  }
3467
3468      gcc_assert (stack);
3469    }
3470
3471  if (!BARRIER_P (before)
3472      && !BARRIER_P (insn)
3473      && (bb = BLOCK_FOR_INSN (before)))
3474    {
3475      set_block_for_insn (insn, bb);
3476      if (INSN_P (insn))
3477	bb->flags |= BB_DIRTY;
      /* This should not happen, as the first insn in the BB is always
	 either a NOTE or a LABEL.  */
3480      gcc_assert (BB_HEAD (bb) != insn
3481		  /* Avoid clobbering of structure when creating new BB.  */
3482		  || BARRIER_P (insn)
3483		  || (NOTE_P (insn)
3484		      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3485    }
3486
3487  PREV_INSN (before) = insn;
3488  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3489    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3490}
3491
3492/* Remove an insn from its doubly-linked list.  This function knows how
3493   to handle sequences.  */
3494void
3495remove_insn (rtx insn)
3496{
3497  rtx next = NEXT_INSN (insn);
3498  rtx prev = PREV_INSN (insn);
3499  basic_block bb;
3500
3501  if (prev)
3502    {
3503      NEXT_INSN (prev) = next;
3504      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3505	{
3506	  rtx sequence = PATTERN (prev);
3507	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3508	}
3509    }
3510  else if (first_insn == insn)
3511    first_insn = next;
3512  else
3513    {
3514      struct sequence_stack *stack = seq_stack;
3515      /* Scan all pending sequences too.  */
3516      for (; stack; stack = stack->next)
3517	if (insn == stack->first)
3518	  {
3519	    stack->first = next;
3520	    break;
3521	  }
3522
3523      gcc_assert (stack);
3524    }
3525
3526  if (next)
3527    {
3528      PREV_INSN (next) = prev;
3529      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3530	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3531    }
3532  else if (last_insn == insn)
3533    last_insn = prev;
3534  else
3535    {
3536      struct sequence_stack *stack = seq_stack;
3537      /* Scan all pending sequences too.  */
3538      for (; stack; stack = stack->next)
3539	if (insn == stack->last)
3540	  {
3541	    stack->last = prev;
3542	    break;
3543	  }
3544
3545      gcc_assert (stack);
3546    }
3547  if (!BARRIER_P (insn)
3548      && (bb = BLOCK_FOR_INSN (insn)))
3549    {
3550      if (INSN_P (insn))
3551	bb->flags |= BB_DIRTY;
3552      if (BB_HEAD (bb) == insn)
3553	{
	  /* Never delete the basic block note without deleting the
	     whole basic block.  */
3556	  gcc_assert (!NOTE_P (insn));
3557	  BB_HEAD (bb) = next;
3558	}
3559      if (BB_END (bb) == insn)
3560	BB_END (bb) = prev;
3561    }
3562}
3563
3564/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
3565
3566void
3567add_function_usage_to (rtx call_insn, rtx call_fusage)
3568{
3569  gcc_assert (call_insn && CALL_P (call_insn));
3570
3571  /* Put the register usage information on the CALL.  If there is already
3572     some usage information, put ours at the end.  */
3573  if (CALL_INSN_FUNCTION_USAGE (call_insn))
3574    {
3575      rtx link;
3576
3577      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3578	   link = XEXP (link, 1))
3579	;
3580
3581      XEXP (link, 1) = call_fusage;
3582    }
3583  else
3584    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3585}
3586
3587/* Delete all insns made since FROM.
3588   FROM becomes the new last instruction.  */
3589
3590void
3591delete_insns_since (rtx from)
3592{
3593  if (from == 0)
3594    first_insn = 0;
3595  else
3596    NEXT_INSN (from) = 0;
3597  last_insn = from;
3598}
3599
/* This function is deprecated; please use sequences instead.
3601
3602   Move a consecutive bunch of insns to a different place in the chain.
3603   The insns to be moved are those between FROM and TO.
3604   They are moved to a new position after the insn AFTER.
3605   AFTER must not be FROM or TO or any insn in between.
3606
3607   This function does not know about SEQUENCEs and hence should not be
3608   called after delay-slot filling has been done.  */
3609
3610void
3611reorder_insns_nobb (rtx from, rtx to, rtx after)
3612{
3613  /* Splice this bunch out of where it is now.  */
3614  if (PREV_INSN (from))
3615    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3616  if (NEXT_INSN (to))
3617    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3618  if (last_insn == to)
3619    last_insn = PREV_INSN (from);
3620  if (first_insn == from)
3621    first_insn = NEXT_INSN (to);
3622
3623  /* Make the new neighbors point to it and it to them.  */
3624  if (NEXT_INSN (after))
3625    PREV_INSN (NEXT_INSN (after)) = to;
3626
3627  NEXT_INSN (to) = NEXT_INSN (after);
3628  PREV_INSN (from) = after;
3629  NEXT_INSN (after) = from;
3630  if (after == last_insn)
3631    last_insn = to;
3632}
3633
3634/* Same as function above, but take care to update BB boundaries.  */
3635void
3636reorder_insns (rtx from, rtx to, rtx after)
3637{
3638  rtx prev = PREV_INSN (from);
3639  basic_block bb, bb2;
3640
3641  reorder_insns_nobb (from, to, after);
3642
3643  if (!BARRIER_P (after)
3644      && (bb = BLOCK_FOR_INSN (after)))
3645    {
3646      rtx x;
3647      bb->flags |= BB_DIRTY;
3648
3649      if (!BARRIER_P (from)
3650	  && (bb2 = BLOCK_FOR_INSN (from)))
3651	{
3652	  if (BB_END (bb2) == to)
3653	    BB_END (bb2) = prev;
3654	  bb2->flags |= BB_DIRTY;
3655	}
3656
3657      if (BB_END (bb) == after)
3658	BB_END (bb) = to;
3659
3660      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3661	if (!BARRIER_P (x))
3662	  set_block_for_insn (x, bb);
3663    }
3664}
3665
3666/* Return the line note insn preceding INSN.  */
3667
3668static rtx
3669find_line_note (rtx insn)
3670{
3671  if (no_line_numbers)
3672    return 0;
3673
3674  for (; insn; insn = PREV_INSN (insn))
3675    if (NOTE_P (insn)
3676	&& NOTE_LINE_NUMBER (insn) >= 0)
3677      break;
3678
3679  return insn;
3680}
3681
3682
3683/* Emit insn(s) of given code and pattern
3684   at a specified place within the doubly-linked list.
3685
3686   All of the emit_foo global entry points accept an object
3687   X which is either an insn list or a PATTERN of a single
3688   instruction.
3689
3690   There are thus a few canonical ways to generate code and
3691   emit it at a specific place in the instruction stream.  For
3692   example, consider the instruction named SPOT and the fact that
3693   we would like to emit some instructions before SPOT.  We might
3694   do it like this:
3695
3696	start_sequence ();
3697	... emit the new instructions ...
3698	insns_head = get_insns ();
3699	end_sequence ();
3700
3701	emit_insn_before (insns_head, SPOT);
3702
3703	   It used to be common to generate SEQUENCE rtl instead, but that
3704	   is a relic of the past which no longer occurs.  The reason is that
3705	   SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
3706	   generated would almost certainly die right after it was created.  */
3707
3708/* Make X be output before the instruction BEFORE.  */
3709
3710rtx
3711emit_insn_before_noloc (rtx x, rtx before)
3712{
3713  rtx last = before;
3714  rtx insn;
3715
3716  gcc_assert (before);
3717
3718  if (x == NULL_RTX)
3719    return last;
3720
3721  switch (GET_CODE (x))
3722    {
3723    case INSN:
3724    case JUMP_INSN:
3725    case CALL_INSN:
3726    case CODE_LABEL:
3727    case BARRIER:
3728    case NOTE:
3729      insn = x;
3730      while (insn)
3731	{
3732	  rtx next = NEXT_INSN (insn);
3733	  add_insn_before (insn, before);
3734	  last = insn;
3735	  insn = next;
3736	}
3737      break;
3738
3739#ifdef ENABLE_RTL_CHECKING
3740    case SEQUENCE:
3741      gcc_unreachable ();
3742      break;
3743#endif
3744
3745    default:
3746      last = make_insn_raw (x);
3747      add_insn_before (last, before);
3748      break;
3749    }
3750
3751  return last;
3752}
3753
3754/* Make an instruction with body X and code JUMP_INSN
3755   and output it before the instruction BEFORE.  */
3756
3757rtx
3758emit_jump_insn_before_noloc (rtx x, rtx before)
3759{
3760  rtx insn, last = NULL_RTX;
3761
3762  gcc_assert (before);
3763
3764  switch (GET_CODE (x))
3765    {
3766    case INSN:
3767    case JUMP_INSN:
3768    case CALL_INSN:
3769    case CODE_LABEL:
3770    case BARRIER:
3771    case NOTE:
3772      insn = x;
3773      while (insn)
3774	{
3775	  rtx next = NEXT_INSN (insn);
3776	  add_insn_before (insn, before);
3777	  last = insn;
3778	  insn = next;
3779	}
3780      break;
3781
3782#ifdef ENABLE_RTL_CHECKING
3783    case SEQUENCE:
3784      gcc_unreachable ();
3785      break;
3786#endif
3787
3788    default:
3789      last = make_jump_insn_raw (x);
3790      add_insn_before (last, before);
3791      break;
3792    }
3793
3794  return last;
3795}
3796
3797/* Make an instruction with body X and code CALL_INSN
3798   and output it before the instruction BEFORE.  */
3799
3800rtx
3801emit_call_insn_before_noloc (rtx x, rtx before)
3802{
3803  rtx last = NULL_RTX, insn;
3804
3805  gcc_assert (before);
3806
3807  switch (GET_CODE (x))
3808    {
3809    case INSN:
3810    case JUMP_INSN:
3811    case CALL_INSN:
3812    case CODE_LABEL:
3813    case BARRIER:
3814    case NOTE:
3815      insn = x;
3816      while (insn)
3817	{
3818	  rtx next = NEXT_INSN (insn);
3819	  add_insn_before (insn, before);
3820	  last = insn;
3821	  insn = next;
3822	}
3823      break;
3824
3825#ifdef ENABLE_RTL_CHECKING
3826    case SEQUENCE:
3827      gcc_unreachable ();
3828      break;
3829#endif
3830
3831    default:
3832      last = make_call_insn_raw (x);
3833      add_insn_before (last, before);
3834      break;
3835    }
3836
3837  return last;
3838}
3839
3840/* Make an insn of code BARRIER
3841   and output it before the insn BEFORE.  */
3842
3843rtx
3844emit_barrier_before (rtx before)
3845{
3846  rtx insn = rtx_alloc (BARRIER);
3847
3848  INSN_UID (insn) = cur_insn_uid++;
3849
3850  add_insn_before (insn, before);
3851  return insn;
3852}
3853
3854/* Emit the label LABEL before the insn BEFORE.  */
3855
3856rtx
3857emit_label_before (rtx label, rtx before)
3858{
3859  /* This can be called twice for the same label as a result of the
3860     confusion that follows a syntax error!  So make it harmless.  */
3861  if (INSN_UID (label) == 0)
3862    {
3863      INSN_UID (label) = cur_insn_uid++;
3864      add_insn_before (label, before);
3865    }
3866
3867  return label;
3868}
3869
3870/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
3871
3872rtx
3873emit_note_before (int subtype, rtx before)
3874{
3875  rtx note = rtx_alloc (NOTE);
3876  INSN_UID (note) = cur_insn_uid++;
3877#ifndef USE_MAPPED_LOCATION
3878  NOTE_SOURCE_FILE (note) = 0;
3879#endif
3880  NOTE_LINE_NUMBER (note) = subtype;
3881  BLOCK_FOR_INSN (note) = NULL;
3882
3883  add_insn_before (note, before);
3884  return note;
3885}
3886
3887	/* Helper for emit_insn_after; handles lists of instructions
3888	   efficiently.  */
3889
3890static rtx emit_insn_after_1 (rtx, rtx);
3891
3892static rtx
3893emit_insn_after_1 (rtx first, rtx after)
3894{
3895  rtx last;
3896  rtx after_after;
3897  basic_block bb;
3898
3899  if (!BARRIER_P (after)
3900      && (bb = BLOCK_FOR_INSN (after)))
3901    {
3902      bb->flags |= BB_DIRTY;
3903      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3904	if (!BARRIER_P (last))
3905	  set_block_for_insn (last, bb);
3906      if (!BARRIER_P (last))
3907	set_block_for_insn (last, bb);
3908      if (BB_END (bb) == after)
3909	BB_END (bb) = last;
3910    }
3911  else
3912    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3913      continue;
3914
3915  after_after = NEXT_INSN (after);
3916
3917  NEXT_INSN (after) = first;
3918  PREV_INSN (first) = after;
3919  NEXT_INSN (last) = after_after;
3920  if (after_after)
3921    PREV_INSN (after_after) = last;
3922
3923  if (after == last_insn)
3924    last_insn = last;
3925  return last;
3926}
3927
3928/* Make X be output after the insn AFTER.  */
3929
3930rtx
3931emit_insn_after_noloc (rtx x, rtx after)
3932{
3933  rtx last = after;
3934
3935  gcc_assert (after);
3936
3937  if (x == NULL_RTX)
3938    return last;
3939
3940  switch (GET_CODE (x))
3941    {
3942    case INSN:
3943    case JUMP_INSN:
3944    case CALL_INSN:
3945    case CODE_LABEL:
3946    case BARRIER:
3947    case NOTE:
3948      last = emit_insn_after_1 (x, after);
3949      break;
3950
3951#ifdef ENABLE_RTL_CHECKING
3952    case SEQUENCE:
3953      gcc_unreachable ();
3954      break;
3955#endif
3956
3957    default:
3958      last = make_insn_raw (x);
3959      add_insn_after (last, after);
3960      break;
3961    }
3962
3963  return last;
3964}
3965
3966/* Similar to emit_insn_after, except that line notes are to be inserted so
3967   as to act as if this insn were at FROM.  */
3968
3969void
3970emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
3971{
3972  rtx from_line = find_line_note (from);
3973  rtx after_line = find_line_note (after);
3974  rtx insn = emit_insn_after (x, after);
3975
3976  if (from_line)
3977    emit_note_copy_after (from_line, after);
3978
3979  if (after_line)
3980    emit_note_copy_after (after_line, insn);
3981}
3982
3983/* Make an insn of code JUMP_INSN with body X
3984   and output it after the insn AFTER.  */
3985
3986rtx
3987emit_jump_insn_after_noloc (rtx x, rtx after)
3988{
3989  rtx last;
3990
3991  gcc_assert (after);
3992
3993  switch (GET_CODE (x))
3994    {
3995    case INSN:
3996    case JUMP_INSN:
3997    case CALL_INSN:
3998    case CODE_LABEL:
3999    case BARRIER:
4000    case NOTE:
4001      last = emit_insn_after_1 (x, after);
4002      break;
4003
4004#ifdef ENABLE_RTL_CHECKING
4005    case SEQUENCE:
4006      gcc_unreachable ();
4007      break;
4008#endif
4009
4010    default:
4011      last = make_jump_insn_raw (x);
4012      add_insn_after (last, after);
4013      break;
4014    }
4015
4016  return last;
4017}
4018
4019/* Make an instruction with body X and code CALL_INSN
4020   and output it after the instruction AFTER.  */
4021
4022rtx
4023emit_call_insn_after_noloc (rtx x, rtx after)
4024{
4025  rtx last;
4026
4027  gcc_assert (after);
4028
4029  switch (GET_CODE (x))
4030    {
4031    case INSN:
4032    case JUMP_INSN:
4033    case CALL_INSN:
4034    case CODE_LABEL:
4035    case BARRIER:
4036    case NOTE:
4037      last = emit_insn_after_1 (x, after);
4038      break;
4039
4040#ifdef ENABLE_RTL_CHECKING
4041    case SEQUENCE:
4042      gcc_unreachable ();
4043      break;
4044#endif
4045
4046    default:
4047      last = make_call_insn_raw (x);
4048      add_insn_after (last, after);
4049      break;
4050    }
4051
4052  return last;
4053}
4054
4055/* Make an insn of code BARRIER
4056   and output it after the insn AFTER.  */
4057
4058rtx
4059emit_barrier_after (rtx after)
4060{
4061  rtx insn = rtx_alloc (BARRIER);
4062
4063  INSN_UID (insn) = cur_insn_uid++;
4064
4065  add_insn_after (insn, after);
4066  return insn;
4067}
4068
4069/* Emit the label LABEL after the insn AFTER.  */
4070
4071rtx
4072emit_label_after (rtx label, rtx after)
4073{
4074  /* This can be called twice for the same label
4075     as a result of the confusion that follows a syntax error!
4076     So make it harmless.  */
4077  if (INSN_UID (label) == 0)
4078    {
4079      INSN_UID (label) = cur_insn_uid++;
4080      add_insn_after (label, after);
4081    }
4082
4083  return label;
4084}
4085
4086/* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4087
4088rtx
4089emit_note_after (int subtype, rtx after)
4090{
4091  rtx note = rtx_alloc (NOTE);
4092  INSN_UID (note) = cur_insn_uid++;
4093#ifndef USE_MAPPED_LOCATION
4094  NOTE_SOURCE_FILE (note) = 0;
4095#endif
4096  NOTE_LINE_NUMBER (note) = subtype;
4097  BLOCK_FOR_INSN (note) = NULL;
4098  add_insn_after (note, after);
4099  return note;
4100}
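
/* For example, a pass that wants to mark a point in the stream
   without adding a real insn might do (a sketch; the subtype choice
   is illustrative only):

	note = emit_note_after (NOTE_INSN_DELETED, insn);

   The note is chained directly after INSN but carries no pattern.  */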
4101
4102/* Emit a copy of note ORIG after the insn AFTER.  */
4103
4104rtx
4105emit_note_copy_after (rtx orig, rtx after)
4106{
4107  rtx note;
4108
4109  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4110    {
4111      cur_insn_uid++;
4112      return 0;
4113    }
4114
4115  note = rtx_alloc (NOTE);
4116  INSN_UID (note) = cur_insn_uid++;
4117  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4118  NOTE_DATA (note) = NOTE_DATA (orig);
4119  BLOCK_FOR_INSN (note) = NULL;
4120  add_insn_after (note, after);
4121  return note;
4122}
4123
4124/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4125rtx
4126emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4127{
4128  rtx last = emit_insn_after_noloc (pattern, after);
4129
4130  if (pattern == NULL_RTX || !loc)
4131    return last;
4132
4133  after = NEXT_INSN (after);
4134  while (1)
4135    {
4136      if (active_insn_p (after) && !INSN_LOCATOR (after))
4137	INSN_LOCATOR (after) = loc;
4138      if (after == last)
4139	break;
4140      after = NEXT_INSN (after);
4141    }
4142  return last;
4143}
4144
4145/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4146rtx
4147emit_insn_after (rtx pattern, rtx after)
4148{
4149  if (INSN_P (after))
4150    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4151  else
4152    return emit_insn_after_noloc (pattern, after);
4153}
4154
4155/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4156rtx
4157emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4158{
4159  rtx last = emit_jump_insn_after_noloc (pattern, after);
4160
4161  if (pattern == NULL_RTX || !loc)
4162    return last;
4163
4164  after = NEXT_INSN (after);
4165  while (1)
4166    {
4167      if (active_insn_p (after) && !INSN_LOCATOR (after))
4168	INSN_LOCATOR (after) = loc;
4169      if (after == last)
4170	break;
4171      after = NEXT_INSN (after);
4172    }
4173  return last;
4174}
4175
4176/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4177rtx
4178emit_jump_insn_after (rtx pattern, rtx after)
4179{
4180  if (INSN_P (after))
4181    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4182  else
4183    return emit_jump_insn_after_noloc (pattern, after);
4184}
4185
4186/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4187rtx
4188emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4189{
4190  rtx last = emit_call_insn_after_noloc (pattern, after);
4191
4192  if (pattern == NULL_RTX || !loc)
4193    return last;
4194
4195  after = NEXT_INSN (after);
4196  while (1)
4197    {
4198      if (active_insn_p (after) && !INSN_LOCATOR (after))
4199	INSN_LOCATOR (after) = loc;
4200      if (after == last)
4201	break;
4202      after = NEXT_INSN (after);
4203    }
4204  return last;
4205}
4206
4207/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4208rtx
4209emit_call_insn_after (rtx pattern, rtx after)
4210{
4211  if (INSN_P (after))
4212    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4213  else
4214    return emit_call_insn_after_noloc (pattern, after);
4215}
4216
4217/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4218rtx
4219emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4220{
4221  rtx first = PREV_INSN (before);
4222  rtx last = emit_insn_before_noloc (pattern, before);
4223
4224  if (pattern == NULL_RTX || !loc)
4225    return last;
4226
4227  first = NEXT_INSN (first);
4228  while (1)
4229    {
4230      if (active_insn_p (first) && !INSN_LOCATOR (first))
4231	INSN_LOCATOR (first) = loc;
4232      if (first == last)
4233	break;
4234      first = NEXT_INSN (first);
4235    }
4236  return last;
4237}
4238
4239/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4240rtx
4241emit_insn_before (rtx pattern, rtx before)
4242{
4243  if (INSN_P (before))
4244    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4245  else
4246    return emit_insn_before_noloc (pattern, before);
4247}
4248
4249	/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4250rtx
4251emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4252{
4253  rtx first = PREV_INSN (before);
4254  rtx last = emit_jump_insn_before_noloc (pattern, before);
4255
4256	  if (pattern == NULL_RTX || !loc)
4257    return last;
4258
4259  first = NEXT_INSN (first);
4260  while (1)
4261    {
4262      if (active_insn_p (first) && !INSN_LOCATOR (first))
4263	INSN_LOCATOR (first) = loc;
4264      if (first == last)
4265	break;
4266      first = NEXT_INSN (first);
4267    }
4268  return last;
4269}
4270
4271/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4272rtx
4273emit_jump_insn_before (rtx pattern, rtx before)
4274{
4275  if (INSN_P (before))
4276    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4277  else
4278    return emit_jump_insn_before_noloc (pattern, before);
4279}
4280
4281	/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4282rtx
4283emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4284{
4285  rtx first = PREV_INSN (before);
4286  rtx last = emit_call_insn_before_noloc (pattern, before);
4287
4288	  if (pattern == NULL_RTX || !loc)
4289    return last;
4290
4291  first = NEXT_INSN (first);
4292  while (1)
4293    {
4294      if (active_insn_p (first) && !INSN_LOCATOR (first))
4295	INSN_LOCATOR (first) = loc;
4296      if (first == last)
4297	break;
4298      first = NEXT_INSN (first);
4299    }
4300  return last;
4301}
4302
4303	/* Like emit_call_insn_before_noloc,
4304	   but set INSN_LOCATOR according to BEFORE.  */
4305rtx
4306emit_call_insn_before (rtx pattern, rtx before)
4307{
4308  if (INSN_P (before))
4309    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4310  else
4311    return emit_call_insn_before_noloc (pattern, before);
4312}
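
/* To summarize the wrapper family above: the _noloc variants insert
   insns without touching INSN_LOCATOR; the _setloc variants stamp the
   given locator on each newly emitted active insn; and the plain
   variants copy the locator from the insertion point when it is a
   real insn.  A minimal sketch, assuming SPOT is an existing insn:

	emit_insn_before (pat, spot);
	emit_insn_before_noloc (pat, spot);

   The first takes its locator from SPOT; the second leaves it unset.  */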
4313
4314/* Take X and emit it at the end of the doubly-linked
4315   INSN list.
4316
4317   Returns the last insn emitted.  */
4318
4319rtx
4320emit_insn (rtx x)
4321{
4322  rtx last = last_insn;
4323  rtx insn;
4324
4325  if (x == NULL_RTX)
4326    return last;
4327
4328  switch (GET_CODE (x))
4329    {
4330    case INSN:
4331    case JUMP_INSN:
4332    case CALL_INSN:
4333    case CODE_LABEL:
4334    case BARRIER:
4335    case NOTE:
4336      insn = x;
4337      while (insn)
4338	{
4339	  rtx next = NEXT_INSN (insn);
4340	  add_insn (insn);
4341	  last = insn;
4342	  insn = next;
4343	}
4344      break;
4345
4346#ifdef ENABLE_RTL_CHECKING
4347    case SEQUENCE:
4348      gcc_unreachable ();
4349      break;
4350#endif
4351
4352    default:
4353      last = make_insn_raw (x);
4354      add_insn (last);
4355      break;
4356    }
4357
4358  return last;
4359}
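
/* A minimal sketch of this entry point: create a pseudo and emit an
   insn setting it to a constant (the names are illustrative):

	rtx target = gen_reg_rtx (SImode);
	emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (42)));

   make_insn_raw wraps the SET in an INSN, and add_insn chains it at
   the end of the current sequence.  */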
4360
4361/* Make an insn of code JUMP_INSN with pattern X
4362   and add it to the end of the doubly-linked list.  */
4363
4364rtx
4365emit_jump_insn (rtx x)
4366{
4367  rtx last = NULL_RTX, insn;
4368
4369  switch (GET_CODE (x))
4370    {
4371    case INSN:
4372    case JUMP_INSN:
4373    case CALL_INSN:
4374    case CODE_LABEL:
4375    case BARRIER:
4376    case NOTE:
4377      insn = x;
4378      while (insn)
4379	{
4380	  rtx next = NEXT_INSN (insn);
4381	  add_insn (insn);
4382	  last = insn;
4383	  insn = next;
4384	}
4385      break;
4386
4387#ifdef ENABLE_RTL_CHECKING
4388    case SEQUENCE:
4389      gcc_unreachable ();
4390      break;
4391#endif
4392
4393    default:
4394      last = make_jump_insn_raw (x);
4395      add_insn (last);
4396      break;
4397    }
4398
4399  return last;
4400}
4401
4402/* Make an insn of code CALL_INSN with pattern X
4403   and add it to the end of the doubly-linked list.  */
4404
4405rtx
4406emit_call_insn (rtx x)
4407{
4408  rtx insn;
4409
4410  switch (GET_CODE (x))
4411    {
4412    case INSN:
4413    case JUMP_INSN:
4414    case CALL_INSN:
4415    case CODE_LABEL:
4416    case BARRIER:
4417    case NOTE:
4418      insn = emit_insn (x);
4419      break;
4420
4421#ifdef ENABLE_RTL_CHECKING
4422    case SEQUENCE:
4423      gcc_unreachable ();
4424      break;
4425#endif
4426
4427    default:
4428      insn = make_call_insn_raw (x);
4429      add_insn (insn);
4430      break;
4431    }
4432
4433  return insn;
4434}
4435
4436/* Add the label LABEL to the end of the doubly-linked list.  */
4437
4438rtx
4439emit_label (rtx label)
4440{
4441  /* This can be called twice for the same label
4442     as a result of the confusion that follows a syntax error!
4443     So make it harmless.  */
4444  if (INSN_UID (label) == 0)
4445    {
4446      INSN_UID (label) = cur_insn_uid++;
4447      add_insn (label);
4448    }
4449  return label;
4450}
4451
4452/* Make an insn of code BARRIER
4453   and add it to the end of the doubly-linked list.  */
4454
4455rtx
4456emit_barrier (void)
4457{
4458  rtx barrier = rtx_alloc (BARRIER);
4459  INSN_UID (barrier) = cur_insn_uid++;
4460  add_insn (barrier);
4461  return barrier;
4462}
4463
4464	/* Make a line-number NOTE insn for LOCATION and add it to the end
4465	   of the doubly-linked list, but only if line numbers are desired for
4466	   debugging info and LOCATION doesn't match the previous one.  */
4467
4468rtx
4469emit_line_note (location_t location)
4470{
4471  rtx note;
4472
4473#ifdef USE_MAPPED_LOCATION
4474  if (location == last_location)
4475    return NULL_RTX;
4476#else
4477  if (location.file && last_location.file
4478      && !strcmp (location.file, last_location.file)
4479      && location.line == last_location.line)
4480    return NULL_RTX;
4481#endif
4482  last_location = location;
4483
4484  if (no_line_numbers)
4485    {
4486      cur_insn_uid++;
4487      return NULL_RTX;
4488    }
4489
4490#ifdef USE_MAPPED_LOCATION
4491  note = emit_note ((int) location);
4492#else
4493  note = emit_note (location.line);
4494  NOTE_SOURCE_FILE (note) = location.file;
4495#endif
4496
4497  return note;
4498}
4499
4500/* Emit a copy of note ORIG.  */
4501
4502rtx
4503emit_note_copy (rtx orig)
4504{
4505  rtx note;
4506
4507  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4508    {
4509      cur_insn_uid++;
4510      return NULL_RTX;
4511    }
4512
4513  note = rtx_alloc (NOTE);
4514
4515  INSN_UID (note) = cur_insn_uid++;
4516  NOTE_DATA (note) = NOTE_DATA (orig);
4517  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4518  BLOCK_FOR_INSN (note) = NULL;
4519  add_insn (note);
4520
4521  return note;
4522}
4523
4524	/* Make an insn of code NOTE with subtype NOTE_NO
4525	   and add it to the end of the doubly-linked list.  */
4526
4527rtx
4528emit_note (int note_no)
4529{
4530  rtx note;
4531
4532  note = rtx_alloc (NOTE);
4533  INSN_UID (note) = cur_insn_uid++;
4534  NOTE_LINE_NUMBER (note) = note_no;
4535  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4536  BLOCK_FOR_INSN (note) = NULL;
4537  add_insn (note);
4538  return note;
4539}
4540
4541/* Cause next statement to emit a line note even if the line number
4542   has not changed.  */
4543
4544void
4545force_next_line_note (void)
4546{
4547#ifdef USE_MAPPED_LOCATION
4548  last_location = -1;
4549#else
4550  last_location.line = -1;
4551#endif
4552}
4553
4554	/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
4555	   note of this type already exists, its datum is replaced.  */
4556
4557rtx
4558set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4559{
4560  rtx note = find_reg_note (insn, kind, NULL_RTX);
4561
4562  switch (kind)
4563    {
4564    case REG_EQUAL:
4565    case REG_EQUIV:
4566      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4567	 has multiple sets (some callers assume single_set
4568	 means the insn only has one set, when in fact it
4569		 means the insn only has one *useful* set).  */
4570      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4571	{
4572	  gcc_assert (!note);
4573	  return NULL_RTX;
4574	}
4575
4576      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4577	 It serves no useful purpose and breaks eliminate_regs.  */
4578      if (GET_CODE (datum) == ASM_OPERANDS)
4579	return NULL_RTX;
4580      break;
4581
4582    default:
4583      break;
4584    }
4585
4586  if (note)
4587    {
4588      XEXP (note, 0) = datum;
4589      return note;
4590    }
4591
4592  REG_NOTES (insn) = gen_rtx_EXPR_LIST ((enum machine_mode) kind, datum,
4593					REG_NOTES (insn));
4594  return REG_NOTES (insn);
4595}
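
/* Typical usage is to annotate the last insn of a multi-insn
   expansion with the value it ultimately computes, e.g. (a sketch):

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));

   If INSN already carried a REG_EQUAL note, only its datum is
   replaced.  */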
4596
4597/* Return an indication of which type of insn should have X as a body.
4598   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
4599
4600static enum rtx_code
4601classify_insn (rtx x)
4602{
4603  if (LABEL_P (x))
4604    return CODE_LABEL;
4605  if (GET_CODE (x) == CALL)
4606    return CALL_INSN;
4607  if (GET_CODE (x) == RETURN)
4608    return JUMP_INSN;
4609  if (GET_CODE (x) == SET)
4610    {
4611      if (SET_DEST (x) == pc_rtx)
4612	return JUMP_INSN;
4613      else if (GET_CODE (SET_SRC (x)) == CALL)
4614	return CALL_INSN;
4615      else
4616	return INSN;
4617    }
4618  if (GET_CODE (x) == PARALLEL)
4619    {
4620      int j;
4621      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4622	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4623	  return CALL_INSN;
4624	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4625		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4626	  return JUMP_INSN;
4627	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4628		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4629	  return CALL_INSN;
4630    }
4631  return INSN;
4632}
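
/* For instance, (set (pc) (label_ref L)) classifies as JUMP_INSN,
   (set (reg X) (call ...)) as CALL_INSN, and a PARALLEL is scanned
   for embedded CALLs and sets of the pc; anything else falls through
   to plain INSN.  */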
4633
4634/* Emit the rtl pattern X as an appropriate kind of insn.
4635   If X is a label, it is simply added into the insn chain.  */
4636
4637rtx
4638emit (rtx x)
4639{
4640  enum rtx_code code = classify_insn (x);
4641
4642  switch (code)
4643    {
4644    case CODE_LABEL:
4645      return emit_label (x);
4646    case INSN:
4647      return emit_insn (x);
4648	    case JUMP_INSN:
4649      {
4650	rtx insn = emit_jump_insn (x);
4651	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4652	  return emit_barrier ();
4653	return insn;
4654      }
4655    case CALL_INSN:
4656      return emit_call_insn (x);
4657    default:
4658      gcc_unreachable ();
4659    }
4660}
4661
4662/* Space for free sequence stack entries.  */
4663static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4664
4665/* Begin emitting insns to a sequence.  If this sequence will contain
4666   something that might cause the compiler to pop arguments to function
4667   calls (because those pops have previously been deferred; see
4668   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4669   before calling this function.  That will ensure that the deferred
4670   pops are not accidentally emitted in the middle of this sequence.  */
4671
4672void
4673start_sequence (void)
4674{
4675  struct sequence_stack *tem;
4676
4677  if (free_sequence_stack != NULL)
4678    {
4679      tem = free_sequence_stack;
4680      free_sequence_stack = tem->next;
4681    }
4682  else
4683    tem = ggc_alloc (sizeof (struct sequence_stack));
4684
4685  tem->next = seq_stack;
4686  tem->first = first_insn;
4687  tem->last = last_insn;
4688
4689  seq_stack = tem;
4690
4691  first_insn = 0;
4692  last_insn = 0;
4693}
4694
4695/* Set up the insn chain starting with FIRST as the current sequence,
4696   saving the previously current one.  See the documentation for
4697   start_sequence for more information about how to use this function.  */
4698
4699void
4700push_to_sequence (rtx first)
4701{
4702  rtx last;
4703
4704  start_sequence ();
4705
4706  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4707
4708  first_insn = first;
4709  last_insn = last;
4710}
4711
4712/* Set up the outer-level insn chain
4713   as the current sequence, saving the previously current one.  */
4714
4715void
4716push_topmost_sequence (void)
4717{
4718  struct sequence_stack *stack, *top = NULL;
4719
4720  start_sequence ();
4721
4722  for (stack = seq_stack; stack; stack = stack->next)
4723    top = stack;
4724
4725  first_insn = top->first;
4726  last_insn = top->last;
4727}
4728
4729	/* After emitting to the outer-level insn chain, update that chain
4730	   and restore the previously saved state.  */
4731
4732void
4733pop_topmost_sequence (void)
4734{
4735  struct sequence_stack *stack, *top = NULL;
4736
4737  for (stack = seq_stack; stack; stack = stack->next)
4738    top = stack;
4739
4740  top->first = first_insn;
4741  top->last = last_insn;
4742
4743  end_sequence ();
4744}
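
/* A common pattern for examining the function's own insn chain while
   a nested sequence is in progress (a sketch):

	push_topmost_sequence ();
	last = get_last_insn ();
	pop_topmost_sequence ();

   This makes the outermost chain current just long enough to look at
   it, without disturbing the sequence being built.  */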
4745
4746/* After emitting to a sequence, restore previous saved state.
4747
4748   To get the contents of the sequence just made, you must call
4749   `get_insns' *before* calling here.
4750
4751   If the compiler might have deferred popping arguments while
4752   generating this sequence, and this sequence will not be immediately
4753   inserted into the instruction stream, use do_pending_stack_adjust
4754   before calling get_insns.  That will ensure that the deferred
4755   pops are inserted into this sequence, and not into some random
4756   location in the instruction stream.  See INHIBIT_DEFER_POP for more
4757   information about deferred popping of arguments.  */
4758
4759void
4760end_sequence (void)
4761{
4762  struct sequence_stack *tem = seq_stack;
4763
4764  first_insn = tem->first;
4765  last_insn = tem->last;
4766  seq_stack = tem->next;
4767
4768  memset (tem, 0, sizeof (*tem));
4769  tem->next = free_sequence_stack;
4770  free_sequence_stack = tem;
4771}
4772
4773/* Return 1 if currently emitting into a sequence.  */
4774
4775int
4776in_sequence_p (void)
4777{
4778  return seq_stack != 0;
4779}
4780
4781/* Put the various virtual registers into REGNO_REG_RTX.  */
4782
4783static void
4784init_virtual_regs (struct emit_status *es)
4785{
4786  rtx *ptr = es->x_regno_reg_rtx;
4787  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4788  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4789  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4790  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4791  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4792}
4793
4794
4795/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
4796static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4797static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4798static int copy_insn_n_scratches;
4799
4800/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4801   copied an ASM_OPERANDS.
4802   In that case, it is the original input-operand vector.  */
4803static rtvec orig_asm_operands_vector;
4804
4805/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4806   copied an ASM_OPERANDS.
4807   In that case, it is the copied input-operand vector.  */
4808static rtvec copy_asm_operands_vector;
4809
4810/* Likewise for the constraints vector.  */
4811static rtvec orig_asm_constraints_vector;
4812static rtvec copy_asm_constraints_vector;
4813
4814/* Recursively create a new copy of an rtx for copy_insn.
4815   This function differs from copy_rtx in that it handles SCRATCHes and
4816   ASM_OPERANDs properly.
4817   Normally, this function is not used directly; use copy_insn as front end.
4818   However, you could first copy an insn pattern with copy_insn and then use
4819   this function afterwards to properly copy any REG_NOTEs containing
4820   SCRATCHes.  */
4821
4822rtx
4823copy_insn_1 (rtx orig)
4824{
4825  rtx copy;
4826  int i, j;
4827  RTX_CODE code;
4828  const char *format_ptr;
4829
4830  code = GET_CODE (orig);
4831
4832  switch (code)
4833    {
4834    case REG:
4835    case CONST_INT:
4836    case CONST_DOUBLE:
4837    case CONST_VECTOR:
4838    case SYMBOL_REF:
4839    case CODE_LABEL:
4840    case PC:
4841    case CC0:
4842      return orig;
4843    case CLOBBER:
4844      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4845	return orig;
4846      break;
4847
4848    case SCRATCH:
4849      for (i = 0; i < copy_insn_n_scratches; i++)
4850	if (copy_insn_scratch_in[i] == orig)
4851	  return copy_insn_scratch_out[i];
4852      break;
4853
4854    case CONST:
4855      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
4856	 a LABEL_REF, it isn't sharable.  */
4857      if (GET_CODE (XEXP (orig, 0)) == PLUS
4858	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4859	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4860	return orig;
4861      break;
4862
4863      /* A MEM with a constant address is not sharable.  The problem is that
4864	 the constant address may need to be reloaded.  If the mem is shared,
4865	 then reloading one copy of this mem will cause all copies to appear
4866	 to have been reloaded.  */
4867
4868    default:
4869      break;
4870    }
4871
4872  /* Copy the various flags, fields, and other information.  We assume
4873     that all fields need copying, and then clear the fields that should
4874     not be copied.  That is the sensible default behavior, and forces
4875     us to explicitly document why we are *not* copying a flag.  */
4876  copy = shallow_copy_rtx (orig);
4877
4878  /* We do not copy the USED flag, which is used as a mark bit during
4879     walks over the RTL.  */
4880  RTX_FLAG (copy, used) = 0;
4881
4882  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
4883  if (INSN_P (orig))
4884    {
4885      RTX_FLAG (copy, jump) = 0;
4886      RTX_FLAG (copy, call) = 0;
4887      RTX_FLAG (copy, frame_related) = 0;
4888    }
4889
4890  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4891
4892  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4893    switch (*format_ptr++)
4894      {
4895      case 'e':
4896	if (XEXP (orig, i) != NULL)
4897	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4898	break;
4899
4900      case 'E':
4901      case 'V':
4902	if (XVEC (orig, i) == orig_asm_constraints_vector)
4903	  XVEC (copy, i) = copy_asm_constraints_vector;
4904	else if (XVEC (orig, i) == orig_asm_operands_vector)
4905	  XVEC (copy, i) = copy_asm_operands_vector;
4906	else if (XVEC (orig, i) != NULL)
4907	  {
4908	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4909	    for (j = 0; j < XVECLEN (copy, i); j++)
4910	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4911	  }
4912	break;
4913
4914      case 't':
4915      case 'w':
4916      case 'i':
4917      case 's':
4918      case 'S':
4919      case 'u':
4920      case '0':
4921	/* These are left unchanged.  */
4922	break;
4923
4924      default:
4925	gcc_unreachable ();
4926      }
4927
4928  if (code == SCRATCH)
4929    {
4930      i = copy_insn_n_scratches++;
4931      gcc_assert (i < MAX_RECOG_OPERANDS);
4932      copy_insn_scratch_in[i] = orig;
4933      copy_insn_scratch_out[i] = copy;
4934    }
4935  else if (code == ASM_OPERANDS)
4936    {
4937      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4938      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4939      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4940      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4941    }
4942
4943  return copy;
4944}
4945
4946/* Create a new copy of an rtx.
4947   This function differs from copy_rtx in that it handles SCRATCHes and
4948   ASM_OPERANDs properly.
4949   INSN doesn't really have to be a full INSN; it could be just the
4950   pattern.  */
4951rtx
4952copy_insn (rtx insn)
4953{
4954  copy_insn_n_scratches = 0;
4955  orig_asm_operands_vector = 0;
4956  orig_asm_constraints_vector = 0;
4957  copy_asm_operands_vector = 0;
4958  copy_asm_constraints_vector = 0;
4959  return copy_insn_1 (insn);
4960}
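
/* For example, emit_copy_of_insn_after below duplicates an insn by
   copying only its pattern:

	new = emit_insn_after (copy_insn (PATTERN (insn)), after);

   Because the scratch bookkeeping is reset here, SCRATCHes are shared
   within one copied pattern but never across separate calls.  */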
4961
4962/* Initialize data structures and variables in this file
4963   before generating rtl for each function.  */
4964
4965void
4966init_emit (void)
4967{
4968  struct function *f = cfun;
4969
4970  f->emit = ggc_alloc (sizeof (struct emit_status));
4971  first_insn = NULL;
4972  last_insn = NULL;
4973  cur_insn_uid = 1;
4974  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4975  last_location = UNKNOWN_LOCATION;
4976  first_label_num = label_num;
4977  seq_stack = NULL;
4978
4979  /* Init the tables that describe all the pseudo regs.  */
4980
4981  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4982
4983  f->emit->regno_pointer_align
4984    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
4985			 * sizeof (unsigned char));
4986
4987  regno_reg_rtx
4988    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
4989
4990  /* Put copies of all the hard registers into regno_reg_rtx.  */
4991  memcpy (regno_reg_rtx,
4992	  static_regno_reg_rtx,
4993	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
4994
4995  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
4996  init_virtual_regs (f->emit);
4997
4998  /* Indicate that the virtual registers and stack locations are
4999     all pointers.  */
5000  REG_POINTER (stack_pointer_rtx) = 1;
5001  REG_POINTER (frame_pointer_rtx) = 1;
5002  REG_POINTER (hard_frame_pointer_rtx) = 1;
5003  REG_POINTER (arg_pointer_rtx) = 1;
5004
5005  REG_POINTER (virtual_incoming_args_rtx) = 1;
5006  REG_POINTER (virtual_stack_vars_rtx) = 1;
5007  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5008  REG_POINTER (virtual_outgoing_args_rtx) = 1;
5009  REG_POINTER (virtual_cfa_rtx) = 1;
5010
5011#ifdef STACK_BOUNDARY
5012  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5013  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5014  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5015  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5016
5017  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5018  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5019  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5020  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5021  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5022#endif
5023
5024#ifdef INIT_EXPANDERS
5025  INIT_EXPANDERS;
5026#endif
5027}
5028
5029/* Generate a vector constant for mode MODE and constant value CONSTANT.  */
5030
5031static rtx
5032gen_const_vector (enum machine_mode mode, int constant)
5033{
5034  rtx tem;
5035  rtvec v;
5036  int units, i;
5037  enum machine_mode inner;
5038
5039  units = GET_MODE_NUNITS (mode);
5040  inner = GET_MODE_INNER (mode);
5041
5042  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5043
5044  v = rtvec_alloc (units);
5045
5046  /* We need to call this function after we set the scalar const_tiny_rtx
5047     entries.  */
5048  gcc_assert (const_tiny_rtx[constant][(int) inner]);
5049
5050  for (i = 0; i < units; ++i)
5051    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5052
5053  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5054  return tem;
5055}
5056
5057	/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5058	   when all elements are zero, and the one vector when all elements are one.  */
5059rtx
5060gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5061{
5062  enum machine_mode inner = GET_MODE_INNER (mode);
5063  int nunits = GET_MODE_NUNITS (mode);
5064  rtx x;
5065  int i;
5066
5067  /* Check to see if all of the elements have the same value.  */
5068  x = RTVEC_ELT (v, nunits - 1);
5069  for (i = nunits - 2; i >= 0; i--)
5070    if (RTVEC_ELT (v, i) != x)
5071      break;
5072
5073  /* If the values are all the same, check to see if we can use one of the
5074     standard constant vectors.  */
5075  if (i == -1)
5076    {
5077      if (x == CONST0_RTX (inner))
5078	return CONST0_RTX (mode);
5079      else if (x == CONST1_RTX (inner))
5080	return CONST1_RTX (mode);
5081    }
5082
5083  return gen_rtx_raw_CONST_VECTOR (mode, v);
5084}
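
/* A sketch of the collapsing behavior, assuming the target supports
   V4SImode:

	rtvec v = rtvec_alloc (4);
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X is simply CONST0_RTX (V4SImode), the shared zero vector.  */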
5085
5086/* Create some permanent unique rtl objects shared between all functions.
5087   LINE_NUMBERS is nonzero if line numbers are to be generated.  */
5088
5089void
5090init_emit_once (int line_numbers)
5091{
5092  int i;
5093  enum machine_mode mode;
5094  enum machine_mode double_mode;
5095
5096  /* We need reg_raw_mode, so initialize the modes now.  */
5097  init_reg_modes_once ();
5098
5099  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5100     tables.  */
5101  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5102				    const_int_htab_eq, NULL);
5103
5104  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5105				       const_double_htab_eq, NULL);
5106
5107  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5108				    mem_attrs_htab_eq, NULL);
5109  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5110				    reg_attrs_htab_eq, NULL);
5111
5112  no_line_numbers = ! line_numbers;
5113
5114	  /* Compute the byte, word, and double modes.  */
5115
5116  byte_mode = VOIDmode;
5117  word_mode = VOIDmode;
5118  double_mode = VOIDmode;
5119
5120  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5121       mode != VOIDmode;
5122       mode = GET_MODE_WIDER_MODE (mode))
5123    {
5124      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5125	  && byte_mode == VOIDmode)
5126	byte_mode = mode;
5127
5128      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5129	  && word_mode == VOIDmode)
5130	word_mode = mode;
5131    }
5132
5133  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5134       mode != VOIDmode;
5135       mode = GET_MODE_WIDER_MODE (mode))
5136    {
5137      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5138	  && double_mode == VOIDmode)
5139	double_mode = mode;
5140    }
5141
5142  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5143
5144  /* Assign register numbers to the globally defined register rtx.
5145     This must be done at runtime because the register number field
5146     is in a union and some compilers can't initialize unions.  */
5147
5148  pc_rtx = gen_rtx_PC (VOIDmode);
5149  cc0_rtx = gen_rtx_CC0 (VOIDmode);
5150  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5151  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5152  if (hard_frame_pointer_rtx == 0)
5153    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5154					  HARD_FRAME_POINTER_REGNUM);
5155  if (arg_pointer_rtx == 0)
5156    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5157  virtual_incoming_args_rtx =
5158    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5159  virtual_stack_vars_rtx =
5160    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5161  virtual_stack_dynamic_rtx =
5162    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5163  virtual_outgoing_args_rtx =
5164    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5165  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5166
5167  /* Initialize RTL for commonly used hard registers.  These are
5168     copied into regno_reg_rtx as we begin to compile each function.  */
5169  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5170    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5171
5172#ifdef INIT_EXPANDERS
5173  /* This is to initialize {init|mark|free}_machine_status before the first
5174     call to push_function_context_to.  This is needed by the Chill front
5175     end which calls push_function_context_to before the first call to
5176     init_function_start.  */
5177  INIT_EXPANDERS;
5178#endif
5179
5180  /* Create the unique rtx's for certain rtx codes and operand values.  */
5181
5182	  /* Don't use gen_rtx_CONST_INT here, since in this case it
5183	     tries to use these variables.  */
5184  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5185    const_int_rtx[i + MAX_SAVED_CONST_INT] =
5186      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5187
5188  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5189      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5190    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5191  else
5192    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5193
5194  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
5195  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
5196  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
5197  REAL_VALUE_FROM_INT (dconst3,   3,  0, double_mode);
5198  REAL_VALUE_FROM_INT (dconst10, 10,  0, double_mode);
5199  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5200  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5201
5202  dconsthalf = dconst1;
5203  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5204
5205  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5206
5207	  /* Initialize mathematical constants for constant folding builtins.
5208	     These constants need to be given to at least 160 bits of precision.  */
5209  real_from_string (&dconstpi,
5210    "3.1415926535897932384626433832795028841971693993751058209749445923078");
5211  real_from_string (&dconste,
5212    "2.7182818284590452353602874713526624977572470936999595749669676277241");
5213
5214  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5215    {
5216      REAL_VALUE_TYPE *r =
5217	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5218
5219      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5220	   mode != VOIDmode;
5221	   mode = GET_MODE_WIDER_MODE (mode))
5222	const_tiny_rtx[i][(int) mode] =
5223	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5224
5225      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5226	   mode != VOIDmode;
5227	   mode = GET_MODE_WIDER_MODE (mode))
5228	const_tiny_rtx[i][(int) mode] =
5229	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5230
5231      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5232
5233      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5234	   mode != VOIDmode;
5235	   mode = GET_MODE_WIDER_MODE (mode))
5236	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5237
5238      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5239	   mode != VOIDmode;
5240	   mode = GET_MODE_WIDER_MODE (mode))
5241	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5242    }
5243
5244  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5245       mode != VOIDmode;
5246       mode = GET_MODE_WIDER_MODE (mode))
5247    {
5248      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5249      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5250    }
5251
5252  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5253       mode != VOIDmode;
5254       mode = GET_MODE_WIDER_MODE (mode))
5255    {
5256      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5257      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5258    }
5259
5260  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5261    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5262      const_tiny_rtx[0][i] = const0_rtx;
5263
5264  const_tiny_rtx[0][(int) BImode] = const0_rtx;
5265  if (STORE_FLAG_VALUE == 1)
5266    const_tiny_rtx[1][(int) BImode] = const1_rtx;
5267
5268#ifdef RETURN_ADDRESS_POINTER_REGNUM
5269  return_address_pointer_rtx
5270    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5271#endif
5272
5273#ifdef STATIC_CHAIN_REGNUM
5274  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5275
5276#ifdef STATIC_CHAIN_INCOMING_REGNUM
5277  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5278    static_chain_incoming_rtx
5279      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5280  else
5281#endif
5282    static_chain_incoming_rtx = static_chain_rtx;
5283#endif
5284
5285#ifdef STATIC_CHAIN
5286  static_chain_rtx = STATIC_CHAIN;
5287
5288#ifdef STATIC_CHAIN_INCOMING
5289  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5290#else
5291  static_chain_incoming_rtx = static_chain_rtx;
5292#endif
5293#endif
5294
5295  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5296    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5297}
5298
5299	/* Produce an exact duplicate of insn INSN after AFTER, taking care to
5300	   update any libcall regions if present.  */
5301
5302rtx
5303emit_copy_of_insn_after (rtx insn, rtx after)
5304{
5305  rtx new;
5306  rtx note1, note2, link;
5307
5308  switch (GET_CODE (insn))
5309    {
5310    case INSN:
5311      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5312      break;
5313
5314    case JUMP_INSN:
5315      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5316      break;
5317
5318    case CALL_INSN:
5319      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5320      if (CALL_INSN_FUNCTION_USAGE (insn))
5321	CALL_INSN_FUNCTION_USAGE (new)
5322	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5323      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5324      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5325      break;
5326
5327    default:
5328      gcc_unreachable ();
5329    }
5330
5331  /* Update LABEL_NUSES.  */
5332  mark_jump_label (PATTERN (new), new, 0);
5333
5334  INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5335
5336  /* If the old insn is frame related, then so is the new one.  This is
5337     primarily needed for IA-64 unwind info which marks epilogue insns,
5338     which may be duplicated by the basic block reordering code.  */
5339  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5340
5341  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5342     make them.  */
5343  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5344    if (REG_NOTE_KIND (link) != REG_LABEL)
5345      {
5346	if (GET_CODE (link) == EXPR_LIST)
5347	  REG_NOTES (new)
5348	    = copy_insn_1 (gen_rtx_EXPR_LIST (GET_MODE (link),
5349					      XEXP (link, 0),
5350					      REG_NOTES (new)));
5351	else
5352	  REG_NOTES (new)
5353	    = copy_insn_1 (gen_rtx_INSN_LIST (GET_MODE (link),
5354					      XEXP (link, 0),
5355					      REG_NOTES (new)));
5356      }
5357
5358  /* Fix the libcall sequences.  */
5359  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5360    {
5361      rtx p = new;
5362      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5363	p = PREV_INSN (p);
5364      XEXP (note1, 0) = p;
5365      XEXP (note2, 0) = new;
5366    }
5367  INSN_CODE (new) = INSN_CODE (insn);
5368  return new;
5369}
5370
5371	static GTY((deletable)) rtx hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5372rtx
5373gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5374{
5375  if (hard_reg_clobbers[mode][regno])
5376    return hard_reg_clobbers[mode][regno];
5377  else
5378    return (hard_reg_clobbers[mode][regno] =
5379	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5380}
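
/* A sketch of use, assuming hard register number 0 is valid in SImode
   on the target:

	rtx clob = gen_hard_reg_clobber (SImode, 0);

   Repeated calls with the same mode and register return the same
   shared CLOBBER, which keeps clobber-heavy PARALLELs cheap.  */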
5381
5382#include "gt-emit-rtl.h"
5383