1/* Emit RTL for the GCC expander.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4   Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22
23/* Middle-to-low level generation of rtx code and insns.
24
25   This file contains support functions for creating rtl expressions
26   and manipulating them in the doubly-linked chain of insns.
27
28   The patterns of the insns are created by machine-dependent
29   routines in insn-emit.c, which is generated automatically from
30   the machine description.  These routines make the individual rtx's
31   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32   which are automatically generated from rtl.def; what is machine
33   dependent is the kind of rtx's they make and what arguments they
34   use.  */
35
36#include "config.h"
37#include "system.h"
38#include "coretypes.h"
39#include "tm.h"
40#include "toplev.h"
41#include "rtl.h"
42#include "tree.h"
43#include "tm_p.h"
44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
48#include "hard-reg-set.h"
49#include "hashtab.h"
50#include "insn-config.h"
51#include "recog.h"
52#include "real.h"
53#include "fixed-value.h"
54#include "bitmap.h"
55#include "basic-block.h"
56#include "ggc.h"
57#include "debug.h"
58#include "langhooks.h"
59#include "tree-pass.h"
60#include "df.h"
61#include "params.h"
62#include "target.h"
63
64/* Commonly used modes.  */
65
66enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
67enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
68enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
69enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
70
/* Data structures maintained in RTL form for the function currently
   being compiled.  */
72
73struct rtl_data x_rtl;
74
75/* Indexed by pseudo register number, gives the rtx for that pseudo.
76   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested inside top-level
   structures.  */
79
80rtx * regno_reg_rtx;
81
82/* This is *not* reset after each function.  It gives each CODE_LABEL
83   in the entire compilation a unique label number.  */
84
85static GTY(()) int label_num = 1;
86
87/* Commonly used rtx's, so that we only need space for one copy.
88   These are initialized once for the entire compilation.
89   All of these are unique; no other rtx-object will be equal to any
90   of these.  */
91
92rtx global_rtl[GR_MAX];
93
94/* Commonly used RTL for hard registers.  These objects are not necessarily
95   unique, so we allocate them separately from global_rtl.  They are
96   initialized once per compilation unit, then copied into regno_reg_rtx
97   at the beginning of each function.  */
98static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
99
100/* We record floating-point CONST_DOUBLEs in each floating-point mode for
101   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
102   record a copy of const[012]_rtx.  */
103
104rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
105
106rtx const_true_rtx;
107
108REAL_VALUE_TYPE dconst0;
109REAL_VALUE_TYPE dconst1;
110REAL_VALUE_TYPE dconst2;
111REAL_VALUE_TYPE dconstm1;
112REAL_VALUE_TYPE dconsthalf;
113
114/* Record fixed-point constant 0 and 1.  */
115FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
116FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
117
118/* All references to the following fixed hard registers go through
119   these unique rtl objects.  On machines where the frame-pointer and
120   arg-pointer are the same register, they use the same unique object.
121
122   After register allocation, other rtl objects which used to be pseudo-regs
123   may be clobbered to refer to the frame-pointer register.
124   But references that were originally to the frame-pointer can be
125   distinguished from the others because they contain frame_pointer_rtx.
126
127   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
128   tricky: until register elimination has taken place hard_frame_pointer_rtx
129   should be used if it is being set, and frame_pointer_rtx otherwise.  After
130   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (which is the common
   case), these are one and the same rtx.
133
134   In an inline procedure, the stack and frame pointer rtxs may not be
135   used for anything else.  */
136rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
137
138/* This is used to implement __builtin_return_address for some machines.
139   See for instance the MIPS port.  */
140rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
141
142/* We make one copy of (const_int C) where C is in
143   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
144   to save space during the compilation and simplify comparisons of
145   integers.  */
146
147rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
148
149/* A hash table storing CONST_INTs whose absolute value is greater
150   than MAX_SAVED_CONST_INT.  */
151
152static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
153     htab_t const_int_htab;
154
155/* A hash table storing memory attribute structures.  */
156static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
157     htab_t mem_attrs_htab;
158
159/* A hash table storing register attribute structures.  */
160static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
161     htab_t reg_attrs_htab;
162
163/* A hash table storing all CONST_DOUBLEs.  */
164static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
165     htab_t const_double_htab;
166
167/* A hash table storing all CONST_FIXEDs.  */
168static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
169     htab_t const_fixed_htab;
170
171#define first_insn (crtl->emit.x_first_insn)
172#define last_insn (crtl->emit.x_last_insn)
173#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
174#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
175#define last_location (crtl->emit.x_last_location)
176#define first_label_num (crtl->emit.x_first_label_num)
177
178static rtx make_call_insn_raw (rtx);
179static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
180static void set_used_decls (tree);
181static void mark_label_nuses (rtx);
182static hashval_t const_int_htab_hash (const void *);
183static int const_int_htab_eq (const void *, const void *);
184static hashval_t const_double_htab_hash (const void *);
185static int const_double_htab_eq (const void *, const void *);
186static rtx lookup_const_double (rtx);
187static hashval_t const_fixed_htab_hash (const void *);
188static int const_fixed_htab_eq (const void *, const void *);
189static rtx lookup_const_fixed (rtx);
190static hashval_t mem_attrs_htab_hash (const void *);
191static int mem_attrs_htab_eq (const void *, const void *);
192static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
193				 addr_space_t, enum machine_mode);
194static hashval_t reg_attrs_htab_hash (const void *);
195static int reg_attrs_htab_eq (const void *, const void *);
196static reg_attrs *get_reg_attrs (tree, int);
197static rtx gen_const_vector (enum machine_mode, int);
198static void copy_rtx_if_shared_1 (rtx *orig);
199
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
202int split_branch_probability = -1;
203
/* Returns a hash code for X (which is really a CONST_INT).  */
205
206static hashval_t
207const_int_htab_hash (const void *x)
208{
209  return (hashval_t) INTVAL ((const_rtx) x);
210}
211
212/* Returns nonzero if the value represented by X (which is really a
213   CONST_INT) is the same as that given by Y (which is really a
214   HOST_WIDE_INT *).  */
215
216static int
217const_int_htab_eq (const void *x, const void *y)
218{
219  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
220}
221
222/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
223static hashval_t
224const_double_htab_hash (const void *x)
225{
226  const_rtx const value = (const_rtx) x;
227  hashval_t h;
228
229  if (GET_MODE (value) == VOIDmode)
230    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
231  else
232    {
233      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
234      /* MODE is used in the comparison, so it should be in the hash.  */
235      h ^= GET_MODE (value);
236    }
237  return h;
238}
239
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
242static int
243const_double_htab_eq (const void *x, const void *y)
244{
245  const_rtx const a = (const_rtx)x, b = (const_rtx)y;
246
247  if (GET_MODE (a) != GET_MODE (b))
248    return 0;
249  if (GET_MODE (a) == VOIDmode)
250    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
251	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
252  else
253    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
254			   CONST_DOUBLE_REAL_VALUE (b));
255}
256
257/* Returns a hash code for X (which is really a CONST_FIXED).  */
258
259static hashval_t
260const_fixed_htab_hash (const void *x)
261{
262  const_rtx const value = (const_rtx) x;
263  hashval_t h;
264
265  h = fixed_hash (CONST_FIXED_VALUE (value));
266  /* MODE is used in the comparison, so it should be in the hash.  */
267  h ^= GET_MODE (value);
268  return h;
269}
270
/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */
273
274static int
275const_fixed_htab_eq (const void *x, const void *y)
276{
277  const_rtx const a = (const_rtx) x, b = (const_rtx) y;
278
279  if (GET_MODE (a) != GET_MODE (b))
280    return 0;
281  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
282}
283
/* Returns a hash code for X (which is really a mem_attrs *).  */
285
286static hashval_t
287mem_attrs_htab_hash (const void *x)
288{
289  const mem_attrs *const p = (const mem_attrs *) x;
290
291  return (p->alias ^ (p->align * 1000)
292	  ^ (p->addrspace * 4000)
293	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
294	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
295	  ^ (size_t) iterative_hash_expr (p->expr, 0));
296}
297
298/* Returns nonzero if the value represented by X (which is really a
299   mem_attrs *) is the same as that given by Y (which is also really a
300   mem_attrs *).  */
301
302static int
303mem_attrs_htab_eq (const void *x, const void *y)
304{
305  const mem_attrs *const p = (const mem_attrs *) x;
306  const mem_attrs *const q = (const mem_attrs *) y;
307
308  return (p->alias == q->alias && p->offset == q->offset
309	  && p->size == q->size && p->align == q->align
310	  && p->addrspace == q->addrspace
311	  && (p->expr == q->expr
312	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
313		  && operand_equal_p (p->expr, q->expr, 0))));
314}
315
316/* Allocate a new mem_attrs structure and insert it into the hash table if
317   one identical to it is not already in the table.  We are doing this for
318   MEM of mode MODE.  */
319
320static mem_attrs *
321get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
322	       unsigned int align, addr_space_t addrspace, enum machine_mode mode)
323{
324  mem_attrs attrs;
325  void **slot;
326
327  /* If everything is the default, we can just return zero.
328     This must match what the corresponding MEM_* macros return when the
329     field is not present.  */
330  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
331      && (size == 0
332	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
333      && (STRICT_ALIGNMENT && mode != BLKmode
334	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
335    return 0;
336
337  attrs.alias = alias;
338  attrs.expr = expr;
339  attrs.offset = offset;
340  attrs.size = size;
341  attrs.align = align;
342  attrs.addrspace = addrspace;
343
344  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
345  if (*slot == 0)
346    {
347      *slot = ggc_alloc (sizeof (mem_attrs));
348      memcpy (*slot, &attrs, sizeof (mem_attrs));
349    }
350
351  return (mem_attrs *) *slot;
352}
353
/* Returns a hash code for X (which is really a reg_attrs *).  */
355
356static hashval_t
357reg_attrs_htab_hash (const void *x)
358{
359  const reg_attrs *const p = (const reg_attrs *) x;
360
361  return ((p->offset * 1000) ^ (long) p->decl);
362}
363
364/* Returns nonzero if the value represented by X (which is really a
365   reg_attrs *) is the same as that given by Y (which is also really a
366   reg_attrs *).  */
367
368static int
369reg_attrs_htab_eq (const void *x, const void *y)
370{
371  const reg_attrs *const p = (const reg_attrs *) x;
372  const reg_attrs *const q = (const reg_attrs *) y;
373
374  return (p->decl == q->decl && p->offset == q->offset);
375}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */
379
380static reg_attrs *
381get_reg_attrs (tree decl, int offset)
382{
383  reg_attrs attrs;
384  void **slot;
385
386  /* If everything is the default, we can just return zero.  */
387  if (decl == 0 && offset == 0)
388    return 0;
389
390  attrs.decl = decl;
391  attrs.offset = offset;
392
393  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
394  if (*slot == 0)
395    {
396      *slot = ggc_alloc (sizeof (reg_attrs));
397      memcpy (*slot, &attrs, sizeof (reg_attrs));
398    }
399
400  return (reg_attrs *) *slot;
401}
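
/* Illustrative sketch only (kept out of the build with #if 0): because
   reg_attrs structures are pooled in reg_attrs_htab, two registers created
   for the same decl and offset share one attribute record, so pointer
   comparison suffices.  DECL stands for an arbitrary non-NULL decl and
   SImode is just an example mode.  */
#if 0
static void
example_shared_reg_attrs (tree decl)
{
  rtx r1 = gen_reg_rtx (SImode);
  rtx r2 = gen_reg_rtx (SImode);

  REG_ATTRS (r1) = get_reg_attrs (decl, 0);
  REG_ATTRS (r2) = get_reg_attrs (decl, 0);
  gcc_assert (REG_ATTRS (r1) == REG_ATTRS (r2));
}
#endif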
402
403
404#if !HAVE_blockage
405/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
406   across this insn. */
407
408rtx
409gen_blockage (void)
410{
411  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
412  MEM_VOLATILE_P (x) = true;
413  return x;
414}
415#endif
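
/* Usage sketch (illustrative only, not compiled): a caller that needs a
   scheduling barrier wraps the ASM_INPUT above in an insn, whether or not
   the target provides its own blockage pattern.  */
#if 0
static void
example_emit_scheduling_barrier (void)
{
  emit_insn (gen_blockage ());
}
#endif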
416
417
418/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
419   don't attempt to share with the various global pieces of rtl (such as
420   frame_pointer_rtx).  */
421
422rtx
423gen_raw_REG (enum machine_mode mode, int regno)
424{
425  rtx x = gen_rtx_raw_REG (mode, regno);
426  ORIGINAL_REGNO (x) = regno;
427  return x;
428}
429
430/* There are some RTL codes that require special attention; the generation
431   functions do the raw handling.  If you add to this list, modify
432   special_rtx in gengenrtl.c as well.  */
433
434rtx
435gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
436{
437  void **slot;
438
439  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
440    return const_int_rtx[arg + MAX_SAVED_CONST_INT];
441
442#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
443  if (const_true_rtx && arg == STORE_FLAG_VALUE)
444    return const_true_rtx;
445#endif
446
447  /* Look up the CONST_INT in the hash table.  */
448  slot = htab_find_slot_with_hash (const_int_htab, &arg,
449				   (hashval_t) arg, INSERT);
450  if (*slot == 0)
451    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
452
453  return (rtx) *slot;
454}
455
456rtx
457gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
458{
459  return GEN_INT (trunc_int_for_mode (c, mode));
460}
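
/* Illustrative sketch (not compiled) of the difference between GEN_INT and
   gen_int_mode: gen_int_mode first truncates and sign-extends the value to
   MODE, yielding the canonical CONST_INT for that mode.  Assumes the usual
   8-bit QImode.  */
#if 0
static void
example_int_mode_canonicalization (void)
{
  /* Small constants are shared, so pointer comparison suffices.  */
  gcc_assert (GEN_INT (42) == gen_int_mode (42, SImode));

  /* 0xff does not fit in a signed 8-bit value; gen_int_mode
     canonicalizes it to -1, which GEN_INT alone would not do.  */
  gcc_assert (gen_int_mode (0xff, QImode) == constm1_rtx);
}
#endif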
461
462/* CONST_DOUBLEs might be created from pairs of integers, or from
463   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
464   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
465
466/* Determine whether REAL, a CONST_DOUBLE, already exists in the
467   hash table.  If so, return its counterpart; otherwise add it
468   to the hash table and return it.  */
469static rtx
470lookup_const_double (rtx real)
471{
472  void **slot = htab_find_slot (const_double_htab, real, INSERT);
473  if (*slot == 0)
474    *slot = real;
475
476  return (rtx) *slot;
477}
478
479/* Return a CONST_DOUBLE rtx for a floating-point value specified by
480   VALUE in mode MODE.  */
481rtx
482const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
483{
484  rtx real = rtx_alloc (CONST_DOUBLE);
485  PUT_MODE (real, mode);
486
487  real->u.rv = value;
488
489  return lookup_const_double (real);
490}
491
492/* Determine whether FIXED, a CONST_FIXED, already exists in the
493   hash table.  If so, return its counterpart; otherwise add it
494   to the hash table and return it.  */
495
496static rtx
497lookup_const_fixed (rtx fixed)
498{
499  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
500  if (*slot == 0)
501    *slot = fixed;
502
503  return (rtx) *slot;
504}
505
506/* Return a CONST_FIXED rtx for a fixed-point value specified by
507   VALUE in mode MODE.  */
508
509rtx
510const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
511{
512  rtx fixed = rtx_alloc (CONST_FIXED);
513  PUT_MODE (fixed, mode);
514
515  fixed->u.fv = value;
516
517  return lookup_const_fixed (fixed);
518}
519
520/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
521   of ints: I0 is the low-order word and I1 is the high-order word.
522   Do not use this routine for non-integer modes; convert to
523   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */
524
525rtx
526immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
527{
528  rtx value;
529  unsigned int i;
530
531  /* There are the following cases (note that there are no modes with
532     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
533
534     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
535	gen_int_mode.
536     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 agree), then
	we return a CONST_INT for i0.
540     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
541  if (mode != VOIDmode)
542    {
543      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
544		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
545		  /* We can get a 0 for an error mark.  */
546		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
547		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
548
549      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
550	return gen_int_mode (i0, mode);
551
552      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
553    }
554
555  /* If this integer fits in one word, return a CONST_INT.  */
556  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
557    return GEN_INT (i0);
558
559  /* We use VOIDmode for integers.  */
560  value = rtx_alloc (CONST_DOUBLE);
561  PUT_MODE (value, VOIDmode);
562
563  CONST_DOUBLE_LOW (value) = i0;
564  CONST_DOUBLE_HIGH (value) = i1;
565
566  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
567    XWINT (value, i) = 0;
568
569  return lookup_const_double (value);
570}
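
/* Illustrative sketch (not compiled): building a two-word constant.
   Assuming a 64-bit HOST_WIDE_INT and a 128-bit TImode, the first call
   yields a VOIDmode CONST_DOUBLE holding both words, while a value whose
   high word is just sign copies comes back as a plain CONST_INT.  */
#if 0
static void
example_immed_double_const (void)
{
  rtx wide = immed_double_const (1, 2, TImode);
  rtx narrow = immed_double_const (5, 0, TImode);

  gcc_assert (GET_CODE (wide) == CONST_DOUBLE
	      && CONST_DOUBLE_LOW (wide) == 1
	      && CONST_DOUBLE_HIGH (wide) == 2);
  gcc_assert (CONST_INT_P (narrow) && INTVAL (narrow) == 5);
}
#endif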
571
572rtx
573gen_rtx_REG (enum machine_mode mode, unsigned int regno)
574{
575  /* In case the MD file explicitly references the frame pointer, have
576     all such references point to the same frame pointer.  This is
577     used during frame pointer elimination to distinguish the explicit
578     references to these registers from pseudos that happened to be
579     assigned to them.
580
581     If we have eliminated the frame pointer or arg pointer, we will
582     be using it as a normal register, for example as a spill
583     register.  In such cases, we might be accessing it in a mode that
584     is not Pmode and therefore cannot use the pre-allocated rtx.
585
586     Also don't do this when we are making new REGs in reload, since
587     we don't want to get confused with the real pointers.  */
588
589  if (mode == Pmode && !reload_in_progress)
590    {
591      if (regno == FRAME_POINTER_REGNUM
592	  && (!reload_completed || frame_pointer_needed))
593	return frame_pointer_rtx;
594#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
595      if (regno == HARD_FRAME_POINTER_REGNUM
596	  && (!reload_completed || frame_pointer_needed))
597	return hard_frame_pointer_rtx;
598#endif
599#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
600      if (regno == ARG_POINTER_REGNUM)
601	return arg_pointer_rtx;
602#endif
603#ifdef RETURN_ADDRESS_POINTER_REGNUM
604      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
605	return return_address_pointer_rtx;
606#endif
607      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
608	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
609	return pic_offset_table_rtx;
610      if (regno == STACK_POINTER_REGNUM)
611	return stack_pointer_rtx;
612    }
613
614#if 0
615  /* If the per-function register table has been set up, try to re-use
616     an existing entry in that table to avoid useless generation of RTL.
617
618     This code is disabled for now until we can fix the various backends
619     which depend on having non-shared hard registers in some cases.   Long
620     term we want to re-enable this code as it can significantly cut down
621     on the amount of useless RTL that gets generated.
622
623     We'll also need to fix some code that runs after reload that wants to
624     set ORIGINAL_REGNO.  */
625
626  if (cfun
627      && cfun->emit
628      && regno_reg_rtx
629      && regno < FIRST_PSEUDO_REGISTER
630      && reg_raw_mode[regno] == mode)
631    return regno_reg_rtx[regno];
632#endif
633
634  return gen_raw_REG (mode, regno);
635}
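
/* Illustrative sketch (not compiled) of the sharing described above:
   while RTL is being generated (not during or after reload), asking for
   the frame pointer in Pmode hands back the single pre-allocated rtx, so
   explicit frame pointer references can be recognized by pointer
   identity.  */
#if 0
static void
example_shared_frame_pointer (void)
{
  if (!reload_in_progress && !reload_completed)
    gcc_assert (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM)
		== frame_pointer_rtx);
}
#endif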
636
637rtx
638gen_rtx_MEM (enum machine_mode mode, rtx addr)
639{
640  rtx rt = gen_rtx_raw_MEM (mode, addr);
641
642  /* This field is not cleared by the mere allocation of the rtx, so
643     we clear it here.  */
644  MEM_ATTRS (rt) = 0;
645
646  return rt;
647}
648
/* Generate a MEM referring to non-trapping constant memory.  */
650
651rtx
652gen_const_mem (enum machine_mode mode, rtx addr)
653{
654  rtx mem = gen_rtx_MEM (mode, addr);
655  MEM_READONLY_P (mem) = 1;
656  MEM_NOTRAP_P (mem) = 1;
657  return mem;
658}
659
660/* Generate a MEM referring to fixed portions of the frame, e.g., register
661   save areas.  */
662
663rtx
664gen_frame_mem (enum machine_mode mode, rtx addr)
665{
666  rtx mem = gen_rtx_MEM (mode, addr);
667  MEM_NOTRAP_P (mem) = 1;
668  set_mem_alias_set (mem, get_frame_alias_set ());
669  return mem;
670}
671
672/* Generate a MEM referring to a temporary use of the stack, not part
673    of the fixed stack frame.  For example, something which is pushed
674    by a target splitter.  */
675rtx
676gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
677{
678  rtx mem = gen_rtx_MEM (mode, addr);
679  MEM_NOTRAP_P (mem) = 1;
680  if (!cfun->calls_alloca)
681    set_mem_alias_set (mem, get_frame_alias_set ());
682  return mem;
683}
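
/* Illustrative sketch (not compiled): the three MEM generators above
   differ only in the flags and alias set they preset.  ADDR stands for
   some already-legitimate address and SImode is just an example mode.  */
#if 0
static void
example_mem_flavors (rtx addr)
{
  rtx c = gen_const_mem (SImode, addr);	/* read-only, non-trapping */
  rtx f = gen_frame_mem (SImode, addr);	/* frame alias set, non-trapping */

  gcc_assert (MEM_READONLY_P (c) && MEM_NOTRAP_P (c));
  gcc_assert (MEM_NOTRAP_P (f) && !MEM_READONLY_P (f));
}
#endif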
684
685/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
686   this construct would be valid, and false otherwise.  */
687
688bool
689validate_subreg (enum machine_mode omode, enum machine_mode imode,
690		 const_rtx reg, unsigned int offset)
691{
692  unsigned int isize = GET_MODE_SIZE (imode);
693  unsigned int osize = GET_MODE_SIZE (omode);
694
695  /* All subregs must be aligned.  */
696  if (offset % osize != 0)
697    return false;
698
699  /* The subreg offset cannot be outside the inner object.  */
700  if (offset >= isize)
701    return false;
702
703  /* ??? This should not be here.  Temporarily continue to allow word_mode
704     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
705     Generally, backends are doing something sketchy but it'll take time to
706     fix them all.  */
707  if (omode == word_mode)
708    ;
709  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
710     is the culprit here, and not the backends.  */
711  else if (osize >= UNITS_PER_WORD && isize >= osize)
712    ;
713  /* Allow component subregs of complex and vector.  Though given the below
714     extraction rules, it's not always clear what that means.  */
715  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
716	   && GET_MODE_INNER (imode) == omode)
717    ;
718  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
719     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
720     represent this.  It's questionable if this ought to be represented at
721     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
723  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
724    ;
725  /* Subregs involving floating point modes are not allowed to
726     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
727     (subreg:SI (reg:DF) 0) isn't.  */
728  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
729    {
730      if (isize != osize)
731	return false;
732    }
733
734  /* Paradoxical subregs must have offset zero.  */
735  if (osize > isize)
736    return offset == 0;
737
738  /* This is a normal subreg.  Verify that the offset is representable.  */
739
740  /* For hard registers, we already have most of these rules collected in
741     subreg_offset_representable_p.  */
742  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
743    {
744      unsigned int regno = REGNO (reg);
745
746#ifdef CANNOT_CHANGE_MODE_CLASS
747      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
748	  && GET_MODE_INNER (imode) == omode)
749	;
750      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
751	return false;
752#endif
753
754      return subreg_offset_representable_p (regno, imode, offset, omode);
755    }
756
757  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
759     If the register is larger than a word, the subreg must be the lowpart
760     of a subword.  A subreg does *not* perform arbitrary bit extraction.
761     Given that we've already checked mode/offset alignment, we only have
762     to check subword subregs here.  */
763  if (osize < UNITS_PER_WORD)
764    {
765      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
766      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
767      if (offset % UNITS_PER_WORD != low_off)
768	return false;
769    }
770  return true;
771}
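
/* Illustrative sketch (not compiled) of the rules above, assuming a
   32-bit little-endian target with 4-byte SImode, 8-byte DImode and
   4-byte words.  */
#if 0
static void
example_subreg_validity (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx si = gen_reg_rtx (SImode);

  /* Lowpart of a wider integer register: fine.  */
  gcc_assert (validate_subreg (SImode, DImode, di, 0));

  /* Paradoxical subregs are accepted, but only at offset zero.  */
  gcc_assert (validate_subreg (DImode, SImode, si, 0));
  gcc_assert (!validate_subreg (DImode, SImode, si, 4));
}
#endif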
772
773rtx
774gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
775{
776  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
777  return gen_rtx_raw_SUBREG (mode, reg, offset);
778}
779
/* Generate a SUBREG representing the least-significant part of REG if
   MODE is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */
782
783rtx
784gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
785{
786  enum machine_mode inmode;
787
788  inmode = GET_MODE (reg);
789  if (inmode == VOIDmode)
790    inmode = mode;
791  return gen_rtx_SUBREG (mode, reg,
792			 subreg_lowpart_offset (mode, inmode));
793}
794
795
/* Create an rtvec and store within it the RTXen passed as arguments.  */
797
798rtvec
799gen_rtvec (int n, ...)
800{
801  int i;
802  rtvec rt_val;
803  va_list p;
804
805  va_start (p, n);
806
807  /* Don't allocate an empty rtvec...  */
808  if (n == 0)
809    return NULL_RTVEC;
810
811  rt_val = rtvec_alloc (n);
812
813  for (i = 0; i < n; i++)
814    rt_val->elem[i] = va_arg (p, rtx);
815
816  va_end (p);
817  return rt_val;
818}
819
820rtvec
821gen_rtvec_v (int n, rtx *argp)
822{
823  int i;
824  rtvec rt_val;
825
826  /* Don't allocate an empty rtvec...  */
827  if (n == 0)
828    return NULL_RTVEC;
829
830  rt_val = rtvec_alloc (n);
831
832  for (i = 0; i < n; i++)
833    rt_val->elem[i] = *argp++;
834
835  return rt_val;
836}
837
838/* Return the number of bytes between the start of an OUTER_MODE
839   in-memory value and the start of an INNER_MODE in-memory value,
840   given that the former is a lowpart of the latter.  It may be a
841   paradoxical lowpart, in which case the offset will be negative
842   on big-endian targets.  */
843
844int
845byte_lowpart_offset (enum machine_mode outer_mode,
846		     enum machine_mode inner_mode)
847{
848  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
849    return subreg_lowpart_offset (outer_mode, inner_mode);
850  else
851    return -subreg_lowpart_offset (inner_mode, outer_mode);
852}
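
/* Worked example (not compiled) of the convention above, assuming 4-byte
   SImode, 8-byte DImode and 4-byte words: the SImode lowpart of a DImode
   value starts at byte 0 on a little-endian target and at byte 4 on a
   big-endian one; the paradoxical case mirrors this with 0 or -4.  */
#if 0
static void
example_byte_lowpart_offset (void)
{
  gcc_assert (byte_lowpart_offset (SImode, DImode)
	      == (WORDS_BIG_ENDIAN ? 4 : 0));
  gcc_assert (byte_lowpart_offset (DImode, SImode)
	      == (WORDS_BIG_ENDIAN ? -4 : 0));
}
#endif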
853
854/* Generate a REG rtx for a new pseudo register of mode MODE.
855   This pseudo is assigned the next sequential register number.  */
856
857rtx
858gen_reg_rtx (enum machine_mode mode)
859{
860  rtx val;
861  unsigned int align = GET_MODE_ALIGNMENT (mode);
862
863  gcc_assert (can_create_pseudo_p ());
864
  /* If a virtual register with bigger mode alignment is generated,
     increase the estimated stack alignment, since the register might be
     spilled to the stack later.  */
868  if (SUPPORTS_STACK_ALIGNMENT
869      && crtl->stack_alignment_estimated < align
870      && !crtl->stack_realign_processed)
871    {
872      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
873      if (crtl->stack_alignment_estimated < min_align)
874	crtl->stack_alignment_estimated = min_align;
875    }
876
877  if (generating_concat_p
878      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
879	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
880    {
881      /* For complex modes, don't make a single pseudo.
882	 Instead, make a CONCAT of two pseudos.
883	 This allows noncontiguous allocation of the real and imaginary parts,
884	 which makes much better code.  Besides, allocating DCmode
885	 pseudos overstrains reload on some machines like the 386.  */
886      rtx realpart, imagpart;
887      enum machine_mode partmode = GET_MODE_INNER (mode);
888
889      realpart = gen_reg_rtx (partmode);
890      imagpart = gen_reg_rtx (partmode);
891      return gen_rtx_CONCAT (mode, realpart, imagpart);
892    }
893
  /* Make sure regno_pointer_align and regno_reg_rtx are large
895     enough to have an element for this pseudo reg number.  */
896
897  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
898    {
899      int old_size = crtl->emit.regno_pointer_align_length;
900      char *tmp;
901      rtx *new1;
902
903      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
904      memset (tmp + old_size, 0, old_size);
905      crtl->emit.regno_pointer_align = (unsigned char *) tmp;
906
907      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
908      memset (new1 + old_size, 0, old_size * sizeof (rtx));
909      regno_reg_rtx = new1;
910
911      crtl->emit.regno_pointer_align_length = old_size * 2;
912    }
913
914  val = gen_raw_REG (mode, reg_rtx_no);
915  regno_reg_rtx[reg_rtx_no++] = val;
916  return val;
917}
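
/* Illustrative sketch (not compiled) of the CONCAT behavior described
   above: while generating RTL, a complex-mode pseudo is not a single
   register but a CONCAT of two pseudos of the component mode.  DCmode
   (complex double) is used here purely as an example.  */
#if 0
static void
example_complex_pseudo (void)
{
  rtx c = gen_reg_rtx (DCmode);

  if (generating_concat_p)
    {
      gcc_assert (GET_CODE (c) == CONCAT);
      gcc_assert (GET_MODE (XEXP (c, 0)) == DFmode);
    }
}
#endif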
918
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */
921
922static void
923update_reg_offset (rtx new_rtx, rtx reg, int offset)
924{
925  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
926				   REG_OFFSET (reg) + offset);
927}
928
929/* Generate a register with same attributes as REG, but with OFFSET
930   added to the REG_OFFSET.  */
931
932rtx
933gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
934		    int offset)
935{
936  rtx new_rtx = gen_rtx_REG (mode, regno);
937
938  update_reg_offset (new_rtx, reg, offset);
939  return new_rtx;
940}
941
942/* Generate a new pseudo-register with the same attributes as REG, but
943   with OFFSET added to the REG_OFFSET.  */
944
945rtx
946gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
947{
948  rtx new_rtx = gen_reg_rtx (mode);
949
950  update_reg_offset (new_rtx, reg, offset);
951  return new_rtx;
952}
953
954/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
955   new register is a (possibly paradoxical) lowpart of the old one.  */
956
957void
958adjust_reg_mode (rtx reg, enum machine_mode mode)
959{
960  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
961  PUT_MODE (reg, mode);
962}
963
964/* Copy REG's attributes from X, if X has any attributes.  If REG and X
965   have different modes, REG is a (possibly paradoxical) lowpart of X.  */
966
967void
968set_reg_attrs_from_value (rtx reg, rtx x)
969{
970  int offset;
971
972  /* Hard registers can be reused for multiple purposes within the same
973     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
974     on them is wrong.  */
975  if (HARD_REGISTER_P (reg))
976    return;
977
978  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
979  if (MEM_P (x))
980    {
981      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
982	REG_ATTRS (reg)
983	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
984      if (MEM_POINTER (x))
985	mark_reg_pointer (reg, 0);
986    }
987  else if (REG_P (x))
988    {
989      if (REG_ATTRS (x))
990	update_reg_offset (reg, x, offset);
991      if (REG_POINTER (x))
992	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
993    }
994}
995
996/* Generate a REG rtx for a new pseudo register, copying the mode
997   and attributes from X.  */
998
999rtx
1000gen_reg_rtx_and_attrs (rtx x)
1001{
1002  rtx reg = gen_reg_rtx (GET_MODE (x));
1003  set_reg_attrs_from_value (reg, x);
1004  return reg;
1005}
1006
1007/* Set the register attributes for registers contained in PARM_RTX.
1008   Use needed values from memory attributes of MEM.  */
1009
1010void
1011set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1012{
1013  if (REG_P (parm_rtx))
1014    set_reg_attrs_from_value (parm_rtx, mem);
1015  else if (GET_CODE (parm_rtx) == PARALLEL)
1016    {
1017      /* Check for a NULL entry in the first slot, used to indicate that the
1018	 parameter goes both on the stack and in registers.  */
1019      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1020      for (; i < XVECLEN (parm_rtx, 0); i++)
1021	{
1022	  rtx x = XVECEXP (parm_rtx, 0, i);
1023	  if (REG_P (XEXP (x, 0)))
1024	    REG_ATTRS (XEXP (x, 0))
1025	      = get_reg_attrs (MEM_EXPR (mem),
1026			       INTVAL (XEXP (x, 1)));
1027	}
1028    }
1029}
1030
1031/* Set the REG_ATTRS for registers in value X, given that X represents
1032   decl T.  */
1033
1034void
1035set_reg_attrs_for_decl_rtl (tree t, rtx x)
1036{
1037  if (GET_CODE (x) == SUBREG)
1038    {
1039      gcc_assert (subreg_lowpart_p (x));
1040      x = SUBREG_REG (x);
1041    }
1042  if (REG_P (x))
1043    REG_ATTRS (x)
1044      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1045					       DECL_MODE (t)));
1046  if (GET_CODE (x) == CONCAT)
1047    {
1048      if (REG_P (XEXP (x, 0)))
1049        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1050      if (REG_P (XEXP (x, 1)))
1051	REG_ATTRS (XEXP (x, 1))
1052	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1053    }
1054  if (GET_CODE (x) == PARALLEL)
1055    {
1056      int i, start;
1057
1058      /* Check for a NULL entry, used to indicate that the parameter goes
1059	 both on the stack and in registers.  */
1060      if (XEXP (XVECEXP (x, 0, 0), 0))
1061	start = 0;
1062      else
1063	start = 1;
1064
1065      for (i = start; i < XVECLEN (x, 0); i++)
1066	{
1067	  rtx y = XVECEXP (x, 0, i);
1068	  if (REG_P (XEXP (y, 0)))
1069	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1070	}
1071    }
1072}
1073
1074/* Assign the RTX X to declaration T.  */
1075
1076void
1077set_decl_rtl (tree t, rtx x)
1078{
1079  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1080  if (x)
1081    set_reg_attrs_for_decl_rtl (t, x);
1082}
1083
1084/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
1085   if the ABI requires the parameter to be passed by reference.  */
1086
1087void
1088set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1089{
1090  DECL_INCOMING_RTL (t) = x;
1091  if (x && !by_reference_p)
1092    set_reg_attrs_for_decl_rtl (t, x);
1093}
1094
1095/* Identify REG (which may be a CONCAT) as a user register.  */
1096
1097void
1098mark_user_reg (rtx reg)
1099{
1100  if (GET_CODE (reg) == CONCAT)
1101    {
1102      REG_USERVAR_P (XEXP (reg, 0)) = 1;
1103      REG_USERVAR_P (XEXP (reg, 1)) = 1;
1104    }
1105  else
1106    {
1107      gcc_assert (REG_P (reg));
1108      REG_USERVAR_P (reg) = 1;
1109    }
1110}
1111
1112/* Identify REG as a probable pointer register and show its alignment
1113   as ALIGN, if nonzero.  */
1114
1115void
1116mark_reg_pointer (rtx reg, int align)
1117{
1118  if (! REG_POINTER (reg))
1119    {
1120      REG_POINTER (reg) = 1;
1121
1122      if (align)
1123	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1124    }
1125  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
1127    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1128}
1129
1130/* Return 1 plus largest pseudo reg number used in the current function.  */
1131
1132int
1133max_reg_num (void)
1134{
1135  return reg_rtx_no;
1136}
1137
1138/* Return 1 + the largest label number used so far in the current function.  */
1139
1140int
1141max_label_num (void)
1142{
1143  return label_num;
1144}
1145
1146/* Return first label number used in this function (if any were used).  */
1147
1148int
1149get_first_label_num (void)
1150{
1151  return first_label_num;
1152}
1153
1154/* If the rtx for label was created during the expansion of a nested
1155   function, then first_label_num won't include this label number.
1156   Fix this now so that array indices work later.  */
1157
1158void
1159maybe_set_first_label_num (rtx x)
1160{
1161  if (CODE_LABEL_NUMBER (x) < first_label_num)
1162    first_label_num = CODE_LABEL_NUMBER (x);
1163}
1164
1165/* Return a value representing some low-order bits of X, where the number
1166   of low-order bits is given by MODE.  Note that no conversion is done
1167   between floating-point and fixed-point values, rather, the bit
1168   representation is returned.
1169
1170   This function handles the cases in common between gen_lowpart, below,
1171   and two variants in cse.c and combine.c.  These are the cases that can
1172   be safely handled at all points in the compilation.
1173
1174   If this is not a case we can handle, return 0.  */
1175
1176rtx
1177gen_lowpart_common (enum machine_mode mode, rtx x)
1178{
1179  int msize = GET_MODE_SIZE (mode);
1180  int xsize;
1181  int offset = 0;
1182  enum machine_mode innermode;
1183
1184  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1185     so we have to make one up.  Yuk.  */
1186  innermode = GET_MODE (x);
1187  if (CONST_INT_P (x)
1188      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1189    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1190  else if (innermode == VOIDmode)
1191    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1192
1193  xsize = GET_MODE_SIZE (innermode);
1194
1195  gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1196
1197  if (innermode == mode)
1198    return x;
1199
1200  /* MODE must occupy no more words than the mode of X.  */
1201  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1202      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1203    return 0;
1204
1205  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
1206  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1207    return 0;
1208
1209  offset = subreg_lowpart_offset (mode, innermode);
1210
1211  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1212      && (GET_MODE_CLASS (mode) == MODE_INT
1213	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1214    {
1215      /* If we are getting the low-order part of something that has been
1216	 sign- or zero-extended, we can either just use the object being
1217	 extended or make a narrower extension.  If we want an even smaller
1218	 piece than the size of the object being extended, call ourselves
1219	 recursively.
1220
1221	 This case is used mostly by combine and cse.  */
1222
1223      if (GET_MODE (XEXP (x, 0)) == mode)
1224	return XEXP (x, 0);
1225      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1226	return gen_lowpart_common (mode, XEXP (x, 0));
1227      else if (msize < xsize)
1228	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1229    }
1230  else if (GET_CODE (x) == SUBREG || REG_P (x)
1231	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1232	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
1233    return simplify_gen_subreg (mode, x, innermode, offset);
1234
1235  /* Otherwise, we can't do this.  */
1236  return 0;
1237}
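
/* Illustrative sketch (not compiled): per the extension case above,
   taking the low part of a sign- or zero-extension in the original mode
   simply strips the extension and returns the operand itself.  */
#if 0
static void
example_lowpart_of_extension (void)
{
  rtx si = gen_reg_rtx (SImode);
  rtx di = gen_rtx_SIGN_EXTEND (DImode, si);

  gcc_assert (gen_lowpart_common (SImode, di) == si);
}
#endif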
1238
1239rtx
1240gen_highpart (enum machine_mode mode, rtx x)
1241{
1242  unsigned int msize = GET_MODE_SIZE (mode);
1243  rtx result;
1244
1245  /* This case loses if X is a subreg.  To catch bugs early,
1246     complain if an invalid MODE is used even in other cases.  */
1247  gcc_assert (msize <= UNITS_PER_WORD
1248	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1249
1250  result = simplify_gen_subreg (mode, x, GET_MODE (x),
1251				subreg_highpart_offset (mode, GET_MODE (x)));
1252  gcc_assert (result);
1253
1254  /* simplify_gen_subreg is not guaranteed to return a valid operand for
1255     the target if we have a MEM.  gen_highpart must return a valid operand,
1256     emitting code if necessary to do so.  */
1257  if (MEM_P (result))
1258    {
1259      result = validize_mem (result);
1260      gcc_assert (result);
1261    }
1262
1263  return result;
1264}
1265
/* Like gen_highpart, but accept the mode of EXP explicitly, in case EXP
   can be a VOIDmode constant.  */
1268rtx
1269gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1270{
1271  if (GET_MODE (exp) != VOIDmode)
1272    {
1273      gcc_assert (GET_MODE (exp) == innermode);
1274      return gen_highpart (outermode, exp);
1275    }
1276  return simplify_gen_subreg (outermode, exp, innermode,
1277			      subreg_highpart_offset (outermode, innermode));
1278}
1279
1280/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */
1281
1282unsigned int
1283subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1284{
1285  unsigned int offset = 0;
1286  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1287
1288  if (difference > 0)
1289    {
1290      if (WORDS_BIG_ENDIAN)
1291	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1292      if (BYTES_BIG_ENDIAN)
1293	offset += difference % UNITS_PER_WORD;
1294    }
1295
1296  return offset;
1297}
1298
1299/* Return offset in bytes to get OUTERMODE high part
1300   of the value in mode INNERMODE stored in memory in target format.  */
1301unsigned int
1302subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1303{
1304  unsigned int offset = 0;
1305  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1306
1307  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1308
1309  if (difference > 0)
1310    {
1311      if (! WORDS_BIG_ENDIAN)
1312	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1313      if (! BYTES_BIG_ENDIAN)
1314	offset += difference % UNITS_PER_WORD;
1315    }
1316
1317  return offset;
1318}
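
/* Worked example (not compiled) of the two offset functions, assuming
   4-byte SImode, 8-byte DImode and 4-byte words: the low and high SImode
   halves of a DImode value are at bytes 0 and 4 on a little-endian
   target, and swapped on a big-endian one.  */
#if 0
static void
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  gcc_assert (lo + hi == 4);
  gcc_assert (lo == (WORDS_BIG_ENDIAN ? 4 : 0));
}
#endif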
1319
1320/* Return 1 iff X, assumed to be a SUBREG,
1321   refers to the least significant part of its containing reg.
1322   If X is not a SUBREG, always return 1 (it is its own low part!).  */
1323
1324int
1325subreg_lowpart_p (const_rtx x)
1326{
1327  if (GET_CODE (x) != SUBREG)
1328    return 1;
1329  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1330    return 0;
1331
1332  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1333	  == SUBREG_BYTE (x));
1334}
1335
1336/* Return subword OFFSET of operand OP.
1337   The word number, OFFSET, is interpreted as the word number starting
1338   at the low-order address.  OFFSET 0 is the low-order word if not
1339   WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1340
1341   If we cannot extract the required word, we return zero.  Otherwise,
1342   an rtx corresponding to the requested word will be returned.
1343
1344   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1345   reload has completed, a valid address will always be returned.  After
1346   reload, if a valid address cannot be returned, we return zero.
1347
1348   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1349   it is the responsibility of the caller.
1350
1351   MODE is the mode of OP in case it is a CONST_INT.
1352
1353   ??? This is still rather broken for some cases.  The problem for the
1354   moment is that all callers of this thing provide no 'goal mode' to
1355   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Use of this function can now mostly be superseded by simplify_subreg.  */
1360
1361rtx
1362operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1363{
1364  if (mode == VOIDmode)
1365    mode = GET_MODE (op);
1366
1367  gcc_assert (mode != VOIDmode);
1368
1369  /* If OP is narrower than a word, fail.  */
1370  if (mode != BLKmode
1371      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1372    return 0;
1373
1374  /* If we want a word outside OP, return zero.  */
1375  if (mode != BLKmode
1376      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1377    return const0_rtx;
1378
1379  /* Form a new MEM at the requested address.  */
1380  if (MEM_P (op))
1381    {
1382      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1383
1384      if (! validate_address)
1385	return new_rtx;
1386
1387      else if (reload_completed)
1388	{
1389	  if (! strict_memory_address_addr_space_p (word_mode,
1390						    XEXP (new_rtx, 0),
1391						    MEM_ADDR_SPACE (op)))
1392	    return 0;
1393	}
1394      else
1395	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1396    }
1397
1398  /* Rest can be handled by simplify_subreg.  */
1399  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1400}
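
/* Illustrative sketch (not compiled), assuming 4-byte words and 8-byte
   DImode: words 0 and 1 of a DImode pseudo are its two halves (which one
   is the low-order half depends on WORDS_BIG_ENDIAN), and a request past
   the end of the operand yields const0_rtx.  */
#if 0
static void
example_operand_subword (void)
{
  rtx di = gen_reg_rtx (DImode);

  gcc_assert (operand_subword (di, 0, 1, DImode) != 0);
  gcc_assert (operand_subword (di, 1, 1, DImode) != 0);
  gcc_assert (operand_subword (di, 2, 1, DImode) == const0_rtx);
}
#endif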
1401
1402/* Similar to `operand_subword', but never return 0.  If we can't
1403   extract the required subword, put OP into a register and try again.
1404   The second attempt must succeed.  We always validate the address in
1405   this case.
1406
1407   MODE is the mode of OP, in case it is CONST_INT.  */
1408
1409rtx
1410operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1411{
1412  rtx result = operand_subword (op, offset, 1, mode);
1413
1414  if (result)
1415    return result;
1416
1417  if (mode != BLKmode && mode != VOIDmode)
1418    {
1419      /* If this is a register which can not be accessed by words, copy it
1420	 to a pseudo register.  */
1421      if (REG_P (op))
1422	op = copy_to_reg (op);
1423      else
1424	op = force_reg (mode, op);
1425    }
1426
1427  result = operand_subword (op, offset, 1, mode);
1428  gcc_assert (result);
1429
1430  return result;
1431}
1432
/* Returns 1 if the two MEM_EXPRs can be considered equal
   and 0 otherwise.  */
1435
1436int
1437mem_expr_equal_p (const_tree expr1, const_tree expr2)
1438{
1439  if (expr1 == expr2)
1440    return 1;
1441
1442  if (! expr1 || ! expr2)
1443    return 0;
1444
1445  if (TREE_CODE (expr1) != TREE_CODE (expr2))
1446    return 0;
1447
1448  return operand_equal_p (expr1, expr2, 0);
1449}
1450
1451/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1452   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1453   -1 if not known.  */
1454
1455int
1456get_mem_align_offset (rtx mem, unsigned int align)
1457{
1458  tree expr;
1459  unsigned HOST_WIDE_INT offset;
1460
1461  /* This function can't use
1462     if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1463	 || !CONST_INT_P (MEM_OFFSET (mem))
1464	 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
1465	     < align))
1466       return -1;
1467     else
1468       return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1469     for two reasons:
1470     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1471       for <variable>.  get_inner_reference doesn't handle it and
1472       even if it did, the alignment in that case needs to be determined
1473       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1474     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1475       isn't sufficiently aligned, the object it is in might be.  */
1476  gcc_assert (MEM_P (mem));
1477  expr = MEM_EXPR (mem);
1478  if (expr == NULL_TREE
1479      || MEM_OFFSET (mem) == NULL_RTX
1480      || !CONST_INT_P (MEM_OFFSET (mem)))
1481    return -1;
1482
1483  offset = INTVAL (MEM_OFFSET (mem));
1484  if (DECL_P (expr))
1485    {
1486      if (DECL_ALIGN (expr) < align)
1487	return -1;
1488    }
1489  else if (INDIRECT_REF_P (expr))
1490    {
1491      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1492	return -1;
1493    }
1494  else if (TREE_CODE (expr) == COMPONENT_REF)
1495    {
1496      while (1)
1497	{
1498	  tree inner = TREE_OPERAND (expr, 0);
1499	  tree field = TREE_OPERAND (expr, 1);
1500	  tree byte_offset = component_ref_field_offset (expr);
1501	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1502
1503	  if (!byte_offset
1504	      || !host_integerp (byte_offset, 1)
1505	      || !host_integerp (bit_offset, 1))
1506	    return -1;
1507
1508	  offset += tree_low_cst (byte_offset, 1);
1509	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1510
1511	  if (inner == NULL_TREE)
1512	    {
1513	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1514		  < (unsigned int) align)
1515		return -1;
1516	      break;
1517	    }
1518	  else if (DECL_P (inner))
1519	    {
1520	      if (DECL_ALIGN (inner) < align)
1521		return -1;
1522	      break;
1523	    }
1524	  else if (TREE_CODE (inner) != COMPONENT_REF)
1525	    return -1;
1526	  expr = inner;
1527	}
1528    }
1529  else
1530    return -1;
1531
1532  return offset & ((align / BITS_PER_UNIT) - 1);
1533}
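
/* Illustrative sketch (not compiled): when the function succeeds, the
   returned offset can be subtracted from the address to reach an
   ALIGN-bit aligned point; MEM here is any MEM rtx and 32 an example
   alignment.  */
#if 0
static void
example_mem_align_offset (rtx mem)
{
  int off = get_mem_align_offset (mem, 32);

  /* Either the alignment is unknown (-1), or XEXP (mem, 0) minus OFF
     is known to be 32-bit aligned.  */
  gcc_assert (off == -1 || off < 32 / BITS_PER_UNIT);
}
#endif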
1534
/* Given REF (a MEM) and T, either the type of REF or the expression
1536   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1537   if we are making a new object of this type.  BITPOS is nonzero if
1538   there is an offset outstanding on T that will be applied later.  */
1539
1540void
1541set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1542				 HOST_WIDE_INT bitpos)
1543{
1544  alias_set_type alias = MEM_ALIAS_SET (ref);
1545  tree expr = MEM_EXPR (ref);
1546  rtx offset = MEM_OFFSET (ref);
1547  rtx size = MEM_SIZE (ref);
1548  unsigned int align = MEM_ALIGN (ref);
1549  HOST_WIDE_INT apply_bitpos = 0;
1550  tree type;
1551
1552  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
1555  if (t == NULL_TREE)
1556    return;
1557
1558  type = TYPE_P (t) ? t : TREE_TYPE (t);
1559  if (type == error_mark_node)
1560    return;
1561
1562  /* If we have already set DECL_RTL = ref, get_alias_set will get the
1563     wrong answer, as it assumes that DECL_RTL already has the right alias
1564     info.  Callers should not set DECL_RTL until after the call to
1565     set_mem_attributes.  */
1566  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1567
1568  /* Get the alias set from the expression or type (perhaps using a
1569     front-end routine) and use it.  */
1570  alias = get_alias_set (t);
1571
1572  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1573  MEM_IN_STRUCT_P (ref)
1574    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1575  MEM_POINTER (ref) = POINTER_TYPE_P (type);
1576
1577  /* If we are making an object of this type, or if this is a DECL, we know
1578     that it is a scalar if the type is not an aggregate.  */
1579  if ((objectp || DECL_P (t))
1580      && ! AGGREGATE_TYPE_P (type)
1581      && TREE_CODE (type) != COMPLEX_TYPE)
1582    MEM_SCALAR_P (ref) = 1;
1583
1584  /* We can set the alignment from the type if we are making an object,
1585     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
1586  if (objectp || TREE_CODE (t) == INDIRECT_REF
1587      || TREE_CODE (t) == ALIGN_INDIRECT_REF
1588      || TYPE_ALIGN_OK (type))
1589    align = MAX (align, TYPE_ALIGN (type));
1590  else
1591    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1592      {
1593	if (integer_zerop (TREE_OPERAND (t, 1)))
1594	  /* We don't know anything about the alignment.  */
1595	  align = BITS_PER_UNIT;
1596	else
1597	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1598      }
1599
1600  /* If the size is known, we can set that.  */
1601  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1602    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1603
1604  /* If T is not a type, we may be able to deduce some more information about
1605     the expression.  */
1606  if (! TYPE_P (t))
1607    {
1608      tree base;
1609      bool align_computed = false;
1610
1611      if (TREE_THIS_VOLATILE (t))
1612	MEM_VOLATILE_P (ref) = 1;
1613
1614      /* Now remove any conversions: they don't change what the underlying
1615	 object is.  Likewise for SAVE_EXPR.  */
1616      while (CONVERT_EXPR_P (t)
1617	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
1618	     || TREE_CODE (t) == SAVE_EXPR)
1619	t = TREE_OPERAND (t, 0);
1620
1621      /* We may look through structure-like accesses for the purposes of
1622	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
1623      base = t;
1624      while (TREE_CODE (base) == COMPONENT_REF
1625	     || TREE_CODE (base) == REALPART_EXPR
1626	     || TREE_CODE (base) == IMAGPART_EXPR
1627	     || TREE_CODE (base) == BIT_FIELD_REF)
1628	base = TREE_OPERAND (base, 0);
1629
1630      if (DECL_P (base))
1631	{
1632	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1633	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1634	  else
1635	    MEM_NOTRAP_P (ref) = 1;
1636	}
1637      else
1638	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1639
1640      base = get_base_address (base);
1641      if (base && DECL_P (base)
1642	  && TREE_READONLY (base)
1643	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1644	{
1645	  tree base_type = TREE_TYPE (base);
1646	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1647		      || DECL_ARTIFICIAL (base));
1648	  MEM_READONLY_P (ref) = 1;
1649	}
1650
      /* If this expression uses its parent's alias set, mark it such
1652	 that we won't change it.  */
1653      if (component_uses_parent_alias_set (t))
1654	MEM_KEEP_ALIAS_SET_P (ref) = 1;
1655
1656      /* If this is a decl, set the attributes of the MEM from it.  */
1657      if (DECL_P (t))
1658	{
1659	  expr = t;
1660	  offset = const0_rtx;
1661	  apply_bitpos = bitpos;
1662	  size = (DECL_SIZE_UNIT (t)
1663		  && host_integerp (DECL_SIZE_UNIT (t), 1)
1664		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1665	  align = DECL_ALIGN (t);
1666	  align_computed = true;
1667	}
1668
1669      /* If this is a constant, we know the alignment.  */
1670      else if (CONSTANT_CLASS_P (t))
1671	{
1672	  align = TYPE_ALIGN (type);
1673#ifdef CONSTANT_ALIGNMENT
1674	  align = CONSTANT_ALIGNMENT (t, align);
1675#endif
1676	  align_computed = true;
1677	}
1678
1679      /* If this is a field reference and not a bit-field, record it.  */
1680      /* ??? There is some information that can be gleaned from bit-fields,
1681	 such as the word offset in the structure that might be modified.
1682	 But skip it for now.  */
1683      else if (TREE_CODE (t) == COMPONENT_REF
1684	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1685	{
1686	  expr = t;
1687	  offset = const0_rtx;
1688	  apply_bitpos = bitpos;
1689	  /* ??? Any reason the field size would be different than
1690	     the size we got from the type?  */
1691	}
1692
1693      /* If this is an array reference, look for an outer field reference.  */
1694      else if (TREE_CODE (t) == ARRAY_REF)
1695	{
1696	  tree off_tree = size_zero_node;
1697	  /* We can't modify t, because we use it at the end of the
1698	     function.  */
1699	  tree t2 = t;
1700
1701	  do
1702	    {
1703	      tree index = TREE_OPERAND (t2, 1);
1704	      tree low_bound = array_ref_low_bound (t2);
1705	      tree unit_size = array_ref_element_size (t2);
1706
1707	      /* We assume all arrays have sizes that are a multiple of a byte.
1708		 First subtract the lower bound, if any, in the type of the
1709		 index, then convert to sizetype and multiply by the size of
1710		 the array element.  */
1711	      if (! integer_zerop (low_bound))
1712		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1713				     index, low_bound);
1714
1715	      off_tree = size_binop (PLUS_EXPR,
1716				     size_binop (MULT_EXPR,
1717						 fold_convert (sizetype,
1718							       index),
1719						 unit_size),
1720				     off_tree);
1721	      t2 = TREE_OPERAND (t2, 0);
1722	    }
1723	  while (TREE_CODE (t2) == ARRAY_REF);
1724
1725	  if (DECL_P (t2))
1726	    {
1727	      expr = t2;
1728	      offset = NULL;
1729	      if (host_integerp (off_tree, 1))
1730		{
1731		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1732		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1733		  align = DECL_ALIGN (t2);
1734		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1735	            align = aoff;
1736		  align_computed = true;
1737		  offset = GEN_INT (ioff);
1738		  apply_bitpos = bitpos;
1739		}
1740	    }
1741	  else if (TREE_CODE (t2) == COMPONENT_REF)
1742	    {
1743	      expr = t2;
1744	      offset = NULL;
1745	      if (host_integerp (off_tree, 1))
1746		{
1747		  offset = GEN_INT (tree_low_cst (off_tree, 1));
1748		  apply_bitpos = bitpos;
1749		}
1750	      /* ??? Any reason the field size would be different than
1751		 the size we got from the type?  */
1752	    }
1753	  else if (flag_argument_noalias > 1
1754		   && (INDIRECT_REF_P (t2))
1755		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1756	    {
1757	      expr = t2;
1758	      offset = NULL;
1759	    }
1760	}
1761
1762      /* If this is a Fortran indirect argument reference, record the
1763	 parameter decl.  */
1764      else if (flag_argument_noalias > 1
1765	       && (INDIRECT_REF_P (t))
1766	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1767	{
1768	  expr = t;
1769	  offset = NULL;
1770	}
1771
1772      if (!align_computed && !INDIRECT_REF_P (t))
1773	{
1774	  unsigned int obj_align
1775	    = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
1776	  align = MAX (align, obj_align);
1777	}
1778    }
1779
1780  /* If we modified OFFSET based on T, then subtract the outstanding
1781     bit position offset.  Similarly, increase the size of the accessed
1782     object to contain the negative offset.  */
1783  if (apply_bitpos)
1784    {
1785      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1786      if (size)
1787	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1788    }
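
  /* For example, with APPLY_BITPOS == 32 the recorded offset is decreased
     by 4 bytes and, when known, the recorded size is increased by 4 bytes,
     so the attributes still describe the same underlying object.  */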
1789
1790  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1791    {
1792      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1793	 we're overlapping.  */
1794      offset = NULL;
1795      expr = NULL;
1796    }
1797
1798  /* Now set the attributes we computed above.  */
1799  MEM_ATTRS (ref)
1800    = get_mem_attrs (alias, expr, offset, size, align,
1801		     TYPE_ADDR_SPACE (type), GET_MODE (ref));
1802
1803  /* If this is already known to be a scalar or aggregate, we are done.  */
1804  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1805    return;
1806
1807  /* If it is a reference into an aggregate, this is part of an aggregate.
1808     Otherwise we don't know.  */
1809  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1810	   || TREE_CODE (t) == ARRAY_RANGE_REF
1811	   || TREE_CODE (t) == BIT_FIELD_REF)
1812    MEM_IN_STRUCT_P (ref) = 1;
1813}
1814
1815void
1816set_mem_attributes (rtx ref, tree t, int objectp)
1817{
1818  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1819}
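
/* As an illustrative sketch (not code from this file; MEM, DECL and ADDR
   are placeholders), a caller that has just created a MEM for a
   declaration typically records its attributes with something like:

	rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
	set_mem_attributes (mem, decl, 1);

   so that the new MEM carries the decl's type, alignment and alias
   information.  */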
1820
1821/* Set the alias set of MEM to SET.  */
1822
1823void
1824set_mem_alias_set (rtx mem, alias_set_type set)
1825{
1826#ifdef ENABLE_CHECKING
1827  /* If the new and old alias sets don't conflict, something is wrong.  */
1828  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1829#endif
1830
1831  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1832				   MEM_SIZE (mem), MEM_ALIGN (mem),
1833				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
1834}
1835
1836/* Set the address space of MEM to ADDRSPACE (target-defined).  */
1837
1838void
1839set_mem_addr_space (rtx mem, addr_space_t addrspace)
1840{
1841  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1842				   MEM_OFFSET (mem), MEM_SIZE (mem),
1843				   MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1844}
1845
1846/* Set the alignment of MEM to ALIGN bits.  */
1847
1848void
1849set_mem_align (rtx mem, unsigned int align)
1850{
1851  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1852				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
1853				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
1854}
1855
1856/* Set the expr for MEM to EXPR.  */
1857
1858void
1859set_mem_expr (rtx mem, tree expr)
1860{
1861  MEM_ATTRS (mem)
1862    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1863		     MEM_SIZE (mem), MEM_ALIGN (mem),
1864		     MEM_ADDR_SPACE (mem), GET_MODE (mem));
1865}
1866
1867/* Set the offset of MEM to OFFSET.  */
1868
1869void
1870set_mem_offset (rtx mem, rtx offset)
1871{
1872  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1873				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1874				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
1875}
1876
1877/* Set the size of MEM to SIZE.  */
1878
1879void
1880set_mem_size (rtx mem, rtx size)
1881{
1882  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1883				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1884				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
1885}
1886
1887/* Return a memory reference like MEMREF, but with its mode changed to MODE
1888   and its address changed to ADDR.  (VOIDmode means don't change the mode.
1889   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1890   returned memory location is required to be valid.  The memory
1891   attributes are not changed.  */
1892
1893static rtx
1894change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1895{
1896  addr_space_t as;
1897  rtx new_rtx;
1898
1899  gcc_assert (MEM_P (memref));
1900  as = MEM_ADDR_SPACE (memref);
1901  if (mode == VOIDmode)
1902    mode = GET_MODE (memref);
1903  if (addr == 0)
1904    addr = XEXP (memref, 0);
1905  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1906      && (!validate || memory_address_addr_space_p (mode, addr, as)))
1907    return memref;
1908
1909  if (validate)
1910    {
1911      if (reload_in_progress || reload_completed)
1912	gcc_assert (memory_address_addr_space_p (mode, addr, as));
1913      else
1914	addr = memory_address_addr_space (mode, addr, as);
1915    }
1916
1917  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1918    return memref;
1919
1920  new_rtx = gen_rtx_MEM (mode, addr);
1921  MEM_COPY_ATTRIBUTES (new_rtx, memref);
1922  return new_rtx;
1923}
1924
1925/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1926   way we are changing MEMREF, so we only preserve the alias set.  */
1927
1928rtx
1929change_address (rtx memref, enum machine_mode mode, rtx addr)
1930{
1931  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1932  enum machine_mode mmode = GET_MODE (new_rtx);
1933  unsigned int align;
1934
1935  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1936  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1937
1938  /* If there are no changes, just return the original memory reference.  */
1939  if (new_rtx == memref)
1940    {
1941      if (MEM_ATTRS (memref) == 0
1942	  || (MEM_EXPR (memref) == NULL
1943	      && MEM_OFFSET (memref) == NULL
1944	      && MEM_SIZE (memref) == size
1945	      && MEM_ALIGN (memref) == align))
1946	return new_rtx;
1947
1948      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1949      MEM_COPY_ATTRIBUTES (new_rtx, memref);
1950    }
1951
1952  MEM_ATTRS (new_rtx)
1953    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1954		     MEM_ADDR_SPACE (memref), mmode);
1955
1956  return new_rtx;
1957}
1958
1959/* Return a memory reference like MEMREF, but with its mode changed
1960   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
1961   nonzero, the memory address is forced to be valid.
1962   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1963   and the caller is responsible for adjusting the MEMREF base register.  */
1964
1965rtx
1966adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1967		  int validate, int adjust)
1968{
1969  rtx addr = XEXP (memref, 0);
1970  rtx new_rtx;
1971  rtx memoffset = MEM_OFFSET (memref);
1972  rtx size = 0;
1973  unsigned int memalign = MEM_ALIGN (memref);
1974  addr_space_t as = MEM_ADDR_SPACE (memref);
1975  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
1976  int pbits;
1977
1978  /* If there are no changes, just return the original memory reference.  */
1979  if (mode == GET_MODE (memref) && !offset
1980      && (!validate || memory_address_addr_space_p (mode, addr, as)))
1981    return memref;
1982
1983  /* ??? Prefer to create garbage instead of creating shared rtl.
1984     This may happen even if offset is nonzero -- consider
1985     (plus (plus reg reg) const_int) -- so do this always.  */
1986  addr = copy_rtx (addr);
1987
1988  /* Convert a possibly large offset to a signed value within the
1989     range of the target address space.  */
1990  pbits = GET_MODE_BITSIZE (address_mode);
1991  if (HOST_BITS_PER_WIDE_INT > pbits)
1992    {
1993      int shift = HOST_BITS_PER_WIDE_INT - pbits;
1994      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
1995		>> shift);
1996    }
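
  /* For example, with a 32-bit address space and a 64-bit HOST_WIDE_INT,
     an incoming offset of 0xfffffffc is shifted up by 32 bits and back
     down arithmetically, yielding -4, which is the value the target's
     address arithmetic would actually see.  */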
1997
1998  if (adjust)
1999    {
2000      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2001	 object, we can merge it into the LO_SUM.  */
2002      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2003	  && offset >= 0
2004	  && (unsigned HOST_WIDE_INT) offset
2005	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2006	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2007			       plus_constant (XEXP (addr, 1), offset));
2008      else
2009	addr = plus_constant (addr, offset);
2010    }
2011
2012  new_rtx = change_address_1 (memref, mode, addr, validate);
2013
2014  /* If the address is a REG, change_address_1 rightfully returns memref,
2015     but updating the attributes below would then clobber memref's MEM_ATTRS.  */
2016  if (new_rtx == memref && offset != 0)
2017    new_rtx = copy_rtx (new_rtx);
2018
2019  /* Compute the new values of the memory attributes due to this adjustment.
2020     We add the offsets and update the alignment.  */
2021  if (memoffset)
2022    memoffset = GEN_INT (offset + INTVAL (memoffset));
2023
2024  /* Compute the new alignment by taking the MIN of the alignment and the
2025     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2026     is zero.  */
2027  if (offset != 0)
2028    memalign
2029      = MIN (memalign,
2030	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2031
2032  /* We can compute the size in a number of ways.  */
2033  if (GET_MODE (new_rtx) != BLKmode)
2034    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2035  else if (MEM_SIZE (memref))
2036    size = plus_constant (MEM_SIZE (memref), -offset);
2037
2038  MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2039				       memoffset, size, memalign, as,
2040				       GET_MODE (new_rtx));
2041
2042  /* At some point, we should validate that this offset is within the object,
2043     if all the appropriate values are known.  */
2044  return new_rtx;
2045}
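
/* Most users reach the function above through the adjust_address and
   adjust_address_nv macros.  As an illustrative sketch (not code from this
   file; MEM is a placeholder), fetching the second SImode word of a DImode
   MEM on a little-endian target could be written:

	rtx high = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   which offsets the address by 4 bytes and keeps the recorded offset, size
   and alignment consistent with the new access.  */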
2046
2047/* Return a memory reference like MEMREF, but with its mode changed
2048   to MODE and its address changed to ADDR, which is assumed to be
2049   MEMREF offset by OFFSET bytes.  If VALIDATE is
2050   nonzero, the memory address is forced to be valid.  */
2051
2052rtx
2053adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2054			     HOST_WIDE_INT offset, int validate)
2055{
2056  memref = change_address_1 (memref, VOIDmode, addr, validate);
2057  return adjust_address_1 (memref, mode, offset, validate, 0);
2058}
2059
2060/* Return a memory reference like MEMREF, but whose address is changed by
2061   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2062   known to be in OFFSET (possibly 1).  */
2063
2064rtx
2065offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2066{
2067  rtx new_rtx, addr = XEXP (memref, 0);
2068  addr_space_t as = MEM_ADDR_SPACE (memref);
2069  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2070
2071  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2072
2073  /* At this point we don't know _why_ the address is invalid.  It
2074     could have secondary memory references, multiplies or anything.
2075
2076     However, if we did go and rearrange things, we can wind up not
2077     being able to recognize the magic around pic_offset_table_rtx.
2078     This stuff is fragile, and is yet another example of why it is
2079     bad to expose PIC machinery too early.  */
2080  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2081      && GET_CODE (addr) == PLUS
2082      && XEXP (addr, 0) == pic_offset_table_rtx)
2083    {
2084      addr = force_reg (GET_MODE (addr), addr);
2085      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2086    }
2087
2088  update_temp_slot_address (XEXP (memref, 0), new_rtx);
2089  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2090
2091  /* If there are no changes, just return the original memory reference.  */
2092  if (new_rtx == memref)
2093    return new_rtx;
2094
2095  /* Update the alignment to reflect the offset.  Reset the offset, which
2096     we don't know.  */
2097  MEM_ATTRS (new_rtx)
2098    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2099		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2100		     as, GET_MODE (new_rtx));
2101  return new_rtx;
2102}
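
/* As an illustrative sketch (not code from this file; MEM and INDEX_REG
   are placeholders), indexing into an array of 4-byte elements whose base
   is MEM might look like:

	rtx elt = offset_address (mem, index_reg, 4);

   where INDEX_REG holds a byte offset known to be a multiple of 4, so the
   result can retain 32-bit alignment information.  */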
2103
2104/* Return a memory reference like MEMREF, but with its address changed to
2105   ADDR.  The caller is asserting that the actual piece of memory pointed
2106   to is the same, just the form of the address is being changed, such as
2107   by putting something into a register.  */
2108
2109rtx
2110replace_equiv_address (rtx memref, rtx addr)
2111{
2112  /* change_address_1 copies the memory attribute structure without change
2113     and that's exactly what we want here.  */
2114  update_temp_slot_address (XEXP (memref, 0), addr);
2115  return change_address_1 (memref, VOIDmode, addr, 1);
2116}
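
/* As an illustrative sketch (not code from this file; MEM is a
   placeholder), a back end that needs the address of MEM in a register but
   wants to keep the MEM's attributes can write:

	mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));
*/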
2117
2118/* Likewise, but the reference is not required to be valid.  */
2119
2120rtx
2121replace_equiv_address_nv (rtx memref, rtx addr)
2122{
2123  return change_address_1 (memref, VOIDmode, addr, 0);
2124}
2125
2126/* Return a memory reference like MEMREF, but with its mode widened to
2127   MODE and offset by OFFSET.  This would be used by targets that e.g.
2128   cannot issue QImode memory operations and have to use SImode memory
2129   operations plus masking logic.  */
2130
2131rtx
2132widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2133{
2134  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2135  tree expr = MEM_EXPR (new_rtx);
2136  rtx memoffset = MEM_OFFSET (new_rtx);
2137  unsigned int size = GET_MODE_SIZE (mode);
2138
2139  /* If there are no changes, just return the original memory reference.  */
2140  if (new_rtx == memref)
2141    return new_rtx;
2142
2143  /* If we don't know what offset we were at within the expression, then
2144     we can't know if we've overstepped the bounds.  */
2145  if (! memoffset)
2146    expr = NULL_TREE;
2147
2148  while (expr)
2149    {
2150      if (TREE_CODE (expr) == COMPONENT_REF)
2151	{
2152	  tree field = TREE_OPERAND (expr, 1);
2153	  tree offset = component_ref_field_offset (expr);
2154
2155	  if (! DECL_SIZE_UNIT (field))
2156	    {
2157	      expr = NULL_TREE;
2158	      break;
2159	    }
2160
2161	  /* Is the field at least as large as the access?  If so, ok,
2162	     otherwise strip back to the containing structure.  */
2163	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2164	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2165	      && INTVAL (memoffset) >= 0)
2166	    break;
2167
2168	  if (! host_integerp (offset, 1))
2169	    {
2170	      expr = NULL_TREE;
2171	      break;
2172	    }
2173
2174	  expr = TREE_OPERAND (expr, 0);
2175	  memoffset
2176	    = (GEN_INT (INTVAL (memoffset)
2177			+ tree_low_cst (offset, 1)
2178			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2179			   / BITS_PER_UNIT)));
2180	}
2181      /* Similarly for the decl.  */
2182      else if (DECL_P (expr)
2183	       && DECL_SIZE_UNIT (expr)
2184	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2185	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2186	       && (! memoffset || INTVAL (memoffset) >= 0))
2187	break;
2188      else
2189	{
2190	  /* The widened memory access overflows the expression, which means
2191	     that it could alias another expression.  Zap it.  */
2192	  expr = NULL_TREE;
2193	  break;
2194	}
2195    }
2196
2197  if (! expr)
2198    memoffset = NULL_RTX;
2199
2200  /* The widened memory may alias other stuff, so zap the alias set.  */
2201  /* ??? Maybe use get_alias_set on any remaining expression.  */
2202
2203  MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2204				       MEM_ALIGN (new_rtx),
2205				       MEM_ADDR_SPACE (new_rtx), mode);
2206
2207  return new_rtx;
2208}
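
/* As an illustrative sketch (not code from this file; MEMREF is a
   placeholder), a target without byte loads might widen a QImode reference
   like so:

	rtx wide = widen_memory_access (memref, SImode, 0);

   and then extract the wanted byte from WIDE with shifts and masking; note
   that the widened MEM deliberately loses its alias set, as described
   above.  */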
2209
2210/* A fake decl that is used as the MEM_EXPR of spill slots.  */
2211static GTY(()) tree spill_slot_decl;
2212
2213tree
2214get_spill_slot_decl (bool force_build_p)
2215{
2216  tree d = spill_slot_decl;
2217  rtx rd;
2218
2219  if (d || !force_build_p)
2220    return d;
2221
2222  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2223		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2224  DECL_ARTIFICIAL (d) = 1;
2225  DECL_IGNORED_P (d) = 1;
2226  TREE_USED (d) = 1;
2227  TREE_THIS_NOTRAP (d) = 1;
2228  spill_slot_decl = d;
2229
2230  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2231  MEM_NOTRAP_P (rd) = 1;
2232  MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2233				  NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2234  SET_DECL_RTL (d, rd);
2235
2236  return d;
2237}
2238
2239/* Given MEM, a result from assign_stack_local, fill in the memory
2240   attributes as appropriate for a register allocator spill slot.
2241   These slots are not aliasable by other memory.  We arrange for
2242   them all to use a single MEM_EXPR, so that the aliasing code can
2243   work properly in the case of shared spill slots.  */
2244
2245void
2246set_mem_attrs_for_spill (rtx mem)
2247{
2248  alias_set_type alias;
2249  rtx addr, offset;
2250  tree expr;
2251
2252  expr = get_spill_slot_decl (true);
2253  alias = MEM_ALIAS_SET (DECL_RTL (expr));
2254
2255  /* We expect the incoming memory to be of the form:
2256	(mem:MODE (plus (reg sfp) (const_int offset)))
2257     with perhaps the plus missing for offset = 0.  */
2258  addr = XEXP (mem, 0);
2259  offset = const0_rtx;
2260  if (GET_CODE (addr) == PLUS
2261      && CONST_INT_P (XEXP (addr, 1)))
2262    offset = XEXP (addr, 1);
2263
2264  MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2265				   MEM_SIZE (mem), MEM_ALIGN (mem),
2266				   ADDR_SPACE_GENERIC, GET_MODE (mem));
2267  MEM_NOTRAP_P (mem) = 1;
2268}
2269
2270/* Return a newly created CODE_LABEL rtx with a unique label number.  */
2271
2272rtx
2273gen_label_rtx (void)
2274{
2275  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2276			     NULL, label_num++, NULL);
2277}
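
/* As an illustrative sketch (not code from this file), the usual pattern
   when expanding control flow is:

	rtx label = gen_label_rtx ();
	... emit a conditional branch to LABEL ...
	emit_label (label);
*/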
2278
2279/* For procedure integration.  */
2280
2281/* Install new pointers to the first and last insns in the chain.
2282   Also, set cur_insn_uid to one higher than the last in use.
2283   Used for an inline-procedure after copying the insn chain.  */
2284
2285void
2286set_new_first_and_last_insn (rtx first, rtx last)
2287{
2288  rtx insn;
2289
2290  first_insn = first;
2291  last_insn = last;
2292  cur_insn_uid = 0;
2293
2294  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2295    {
2296      int debug_count = 0;
2297
2298      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2299      cur_debug_insn_uid = 0;
2300
2301      for (insn = first; insn; insn = NEXT_INSN (insn))
2302	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2303	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2304	else
2305	  {
2306	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2307	    if (DEBUG_INSN_P (insn))
2308	      debug_count++;
2309	  }
2310
2311      if (debug_count)
2312	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2313      else
2314	cur_debug_insn_uid++;
2315    }
2316  else
2317    for (insn = first; insn; insn = NEXT_INSN (insn))
2318      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2319
2320  cur_insn_uid++;
2321}
2322
2323/* Go through all the RTL insn bodies and copy any invalid shared
2324   structure.  This routine should only be called once.  */
2325
2326static void
2327unshare_all_rtl_1 (rtx insn)
2328{
2329  /* Unshare just about everything else.  */
2330  unshare_all_rtl_in_chain (insn);
2331
2332  /* Make sure the addresses of stack slots found outside the insn chain
2333     (such as, in DECL_RTL of a variable) are not shared
2334     with the insn chain.
2335
2336     This special care is necessary when the stack slot MEM does not
2337     actually appear in the insn chain.  If it does appear, its address
2338     is unshared from all else at that point.  */
2339  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2340}
2341
2342/* Go through all the RTL insn bodies and copy any invalid shared
2343   structure, again.  This is a fairly expensive thing to do so it
2344   should be done sparingly.  */
2345
2346void
2347unshare_all_rtl_again (rtx insn)
2348{
2349  rtx p;
2350  tree decl;
2351
2352  for (p = insn; p; p = NEXT_INSN (p))
2353    if (INSN_P (p))
2354      {
2355	reset_used_flags (PATTERN (p));
2356	reset_used_flags (REG_NOTES (p));
2357      }
2358
2359  /* Make sure that virtual stack slots are not shared.  */
2360  set_used_decls (DECL_INITIAL (cfun->decl));
2361
2362  /* Make sure that virtual parameters are not shared.  */
2363  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2364    set_used_flags (DECL_RTL (decl));
2365
2366  reset_used_flags (stack_slot_list);
2367
2368  unshare_all_rtl_1 (insn);
2369}
2370
2371unsigned int
2372unshare_all_rtl (void)
2373{
2374  unshare_all_rtl_1 (get_insns ());
2375  return 0;
2376}
2377
2378struct rtl_opt_pass pass_unshare_all_rtl =
2379{
2380 {
2381  RTL_PASS,
2382  "unshare",                            /* name */
2383  NULL,                                 /* gate */
2384  unshare_all_rtl,                      /* execute */
2385  NULL,                                 /* sub */
2386  NULL,                                 /* next */
2387  0,                                    /* static_pass_number */
2388  TV_NONE,                              /* tv_id */
2389  0,                                    /* properties_required */
2390  0,                                    /* properties_provided */
2391  0,                                    /* properties_destroyed */
2392  0,                                    /* todo_flags_start */
2393  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2394 }
2395};
2396
2397
2398/* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2399   Recursively does the same for subexpressions.  */
2400
2401static void
2402verify_rtx_sharing (rtx orig, rtx insn)
2403{
2404  rtx x = orig;
2405  int i;
2406  enum rtx_code code;
2407  const char *format_ptr;
2408
2409  if (x == 0)
2410    return;
2411
2412  code = GET_CODE (x);
2413
2414  /* These types may be freely shared.  */
2415
2416  switch (code)
2417    {
2418    case REG:
2419    case DEBUG_EXPR:
2420    case VALUE:
2421    case CONST_INT:
2422    case CONST_DOUBLE:
2423    case CONST_FIXED:
2424    case CONST_VECTOR:
2425    case SYMBOL_REF:
2426    case LABEL_REF:
2427    case CODE_LABEL:
2428    case PC:
2429    case CC0:
2430    case SCRATCH:
2431      return;
2432      /* SCRATCH rtxes must be shared because they represent distinct values.  */
2433    case CLOBBER:
2434      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2435	return;
2436      break;
2437
2438    case CONST:
2439      if (shared_const_p (orig))
2440	return;
2441      break;
2442
2443    case MEM:
2444      /* A MEM is allowed to be shared if its address is constant.  */
2445      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2446	  || reload_completed || reload_in_progress)
2447	return;
2448
2449      break;
2450
2451    default:
2452      break;
2453    }
2454
2455  /* This rtx may not be shared.  If it has already been seen,
2456     report invalid rtl sharing.  */
2457#ifdef ENABLE_CHECKING
2458  if (RTX_FLAG (x, used))
2459    {
2460      error ("invalid rtl sharing found in the insn");
2461      debug_rtx (insn);
2462      error ("shared rtx");
2463      debug_rtx (x);
2464      internal_error ("internal consistency failure");
2465    }
2466#endif
2467  gcc_assert (!RTX_FLAG (x, used));
2468
2469  RTX_FLAG (x, used) = 1;
2470
2471  /* Now scan the subexpressions recursively.  */
2472
2473  format_ptr = GET_RTX_FORMAT (code);
2474
2475  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2476    {
2477      switch (*format_ptr++)
2478	{
2479	case 'e':
2480	  verify_rtx_sharing (XEXP (x, i), insn);
2481	  break;
2482
2483	case 'E':
2484	  if (XVEC (x, i) != NULL)
2485	    {
2486	      int j;
2487	      int len = XVECLEN (x, i);
2488
2489	      for (j = 0; j < len; j++)
2490		{
2491		  /* We allow sharing of ASM_OPERANDS inside a single
2492		     instruction.  */
2493		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2494		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2495			  == ASM_OPERANDS))
2496		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2497		  else
2498		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2499		}
2500	    }
2501	  break;
2502	}
2503    }
2504  return;
2505}
2506
2507/* Go through all the RTL insn bodies and check that there is no unexpected
2508   sharing between the subexpressions.  */
2509
2510void
2511verify_rtl_sharing (void)
2512{
2513  rtx p;
2514
2515  for (p = get_insns (); p; p = NEXT_INSN (p))
2516    if (INSN_P (p))
2517      {
2518	reset_used_flags (PATTERN (p));
2519	reset_used_flags (REG_NOTES (p));
2520	if (GET_CODE (PATTERN (p)) == SEQUENCE)
2521	  {
2522	    int i;
2523	    rtx q, sequence = PATTERN (p);
2524
2525	    for (i = 0; i < XVECLEN (sequence, 0); i++)
2526	      {
2527		q = XVECEXP (sequence, 0, i);
2528		gcc_assert (INSN_P (q));
2529		reset_used_flags (PATTERN (q));
2530		reset_used_flags (REG_NOTES (q));
2531	      }
2532	  }
2533      }
2534
2535  for (p = get_insns (); p; p = NEXT_INSN (p))
2536    if (INSN_P (p))
2537      {
2538	verify_rtx_sharing (PATTERN (p), p);
2539	verify_rtx_sharing (REG_NOTES (p), p);
2540      }
2541}
2542
2543/* Go through all the RTL insn bodies and copy any invalid shared structure.
2544   Assumes the mark bits are cleared at entry.  */
2545
2546void
2547unshare_all_rtl_in_chain (rtx insn)
2548{
2549  for (; insn; insn = NEXT_INSN (insn))
2550    if (INSN_P (insn))
2551      {
2552	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2553	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2554      }
2555}
2556
2557/* Go through all virtual stack slots of a function and mark them as
2558   shared.  We never replace the DECL_RTLs themselves with a copy,
2559   but expressions mentioned in a DECL_RTL cannot be shared with
2560   expressions in the instruction stream.
2561
2562   Note that reload may convert pseudo registers into memories in-place.
2563   Pseudo registers are always shared, but MEMs never are.  Thus if we
2564   reset the used flags on MEMs in the instruction stream, we must set
2565   them again on MEMs that appear in DECL_RTLs.  */
2566
2567static void
2568set_used_decls (tree blk)
2569{
2570  tree t;
2571
2572  /* Mark decls.  */
2573  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2574    if (DECL_RTL_SET_P (t))
2575      set_used_flags (DECL_RTL (t));
2576
2577  /* Now process sub-blocks.  */
2578  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2579    set_used_decls (t);
2580}
2581
2582/* Mark ORIG as in use, and return a copy of it if it was already in use.
2583   Recursively does the same for subexpressions.  Uses
2584   copy_rtx_if_shared_1 to reduce stack space.  */
2585
2586rtx
2587copy_rtx_if_shared (rtx orig)
2588{
2589  copy_rtx_if_shared_1 (&orig);
2590  return orig;
2591}
2592
2593/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2594   use.  Recursively does the same for subexpressions.  */
2595
2596static void
2597copy_rtx_if_shared_1 (rtx *orig1)
2598{
2599  rtx x;
2600  int i;
2601  enum rtx_code code;
2602  rtx *last_ptr;
2603  const char *format_ptr;
2604  int copied = 0;
2605  int length;
2606
2607  /* Repeat is used to turn tail-recursion into iteration.  */
2608repeat:
2609  x = *orig1;
2610
2611  if (x == 0)
2612    return;
2613
2614  code = GET_CODE (x);
2615
2616  /* These types may be freely shared.  */
2617
2618  switch (code)
2619    {
2620    case REG:
2621    case DEBUG_EXPR:
2622    case VALUE:
2623    case CONST_INT:
2624    case CONST_DOUBLE:
2625    case CONST_FIXED:
2626    case CONST_VECTOR:
2627    case SYMBOL_REF:
2628    case LABEL_REF:
2629    case CODE_LABEL:
2630    case PC:
2631    case CC0:
2632    case SCRATCH:
2633      /* SCRATCH rtxes must be shared because they represent distinct values.  */
2634      return;
2635    case CLOBBER:
2636      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2637	return;
2638      break;
2639
2640    case CONST:
2641      if (shared_const_p (x))
2642	return;
2643      break;
2644
2645    case DEBUG_INSN:
2646    case INSN:
2647    case JUMP_INSN:
2648    case CALL_INSN:
2649    case NOTE:
2650    case BARRIER:
2651      /* The chain of insns is not being copied.  */
2652      return;
2653
2654    default:
2655      break;
2656    }
2657
2658  /* This rtx may not be shared.  If it has already been seen,
2659     replace it with a copy of itself.  */
2660
2661  if (RTX_FLAG (x, used))
2662    {
2663      x = shallow_copy_rtx (x);
2664      copied = 1;
2665    }
2666  RTX_FLAG (x, used) = 1;
2667
2668  /* Now scan the subexpressions recursively.
2669     We can store any replaced subexpressions directly into X
2670     since we know X is not shared!  Any vectors in X
2671     must be copied if X was copied.  */
2672
2673  format_ptr = GET_RTX_FORMAT (code);
2674  length = GET_RTX_LENGTH (code);
2675  last_ptr = NULL;
2676
2677  for (i = 0; i < length; i++)
2678    {
2679      switch (*format_ptr++)
2680	{
2681	case 'e':
2682          if (last_ptr)
2683            copy_rtx_if_shared_1 (last_ptr);
2684	  last_ptr = &XEXP (x, i);
2685	  break;
2686
2687	case 'E':
2688	  if (XVEC (x, i) != NULL)
2689	    {
2690	      int j;
2691	      int len = XVECLEN (x, i);
2692
2693              /* Copy the vector iff we copied the rtx and the length
2694		 is nonzero.  */
2695	      if (copied && len > 0)
2696		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2697
2698              /* Call recursively on all inside the vector.  */
2699	      for (j = 0; j < len; j++)
2700                {
2701		  if (last_ptr)
2702		    copy_rtx_if_shared_1 (last_ptr);
2703                  last_ptr = &XVECEXP (x, i, j);
2704                }
2705	    }
2706	  break;
2707	}
2708    }
2709  *orig1 = x;
2710  if (last_ptr)
2711    {
2712      orig1 = last_ptr;
2713      goto repeat;
2714    }
2715  return;
2716}
2717
2718/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2719   to look for shared sub-parts.  */
2720
2721void
2722reset_used_flags (rtx x)
2723{
2724  int i, j;
2725  enum rtx_code code;
2726  const char *format_ptr;
2727  int length;
2728
2729  /* Repeat is used to turn tail-recursion into iteration.  */
2730repeat:
2731  if (x == 0)
2732    return;
2733
2734  code = GET_CODE (x);
2735
2736  /* These types may be freely shared so we needn't do any resetting
2737     for them.  */
2738
2739  switch (code)
2740    {
2741    case REG:
2742    case DEBUG_EXPR:
2743    case VALUE:
2744    case CONST_INT:
2745    case CONST_DOUBLE:
2746    case CONST_FIXED:
2747    case CONST_VECTOR:
2748    case SYMBOL_REF:
2749    case CODE_LABEL:
2750    case PC:
2751    case CC0:
2752      return;
2753
2754    case DEBUG_INSN:
2755    case INSN:
2756    case JUMP_INSN:
2757    case CALL_INSN:
2758    case NOTE:
2759    case LABEL_REF:
2760    case BARRIER:
2761      /* The chain of insns is not being copied.  */
2762      return;
2763
2764    default:
2765      break;
2766    }
2767
2768  RTX_FLAG (x, used) = 0;
2769
2770  format_ptr = GET_RTX_FORMAT (code);
2771  length = GET_RTX_LENGTH (code);
2772
2773  for (i = 0; i < length; i++)
2774    {
2775      switch (*format_ptr++)
2776	{
2777	case 'e':
2778          if (i == length-1)
2779            {
2780              x = XEXP (x, i);
2781	      goto repeat;
2782            }
2783	  reset_used_flags (XEXP (x, i));
2784	  break;
2785
2786	case 'E':
2787	  for (j = 0; j < XVECLEN (x, i); j++)
2788	    reset_used_flags (XVECEXP (x, i, j));
2789	  break;
2790	}
2791    }
2792}
2793
2794/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2795   to look for shared sub-parts.  */
2796
2797void
2798set_used_flags (rtx x)
2799{
2800  int i, j;
2801  enum rtx_code code;
2802  const char *format_ptr;
2803
2804  if (x == 0)
2805    return;
2806
2807  code = GET_CODE (x);
2808
2809  /* These types may be freely shared so we needn't do any resetting
2810     for them.  */
2811
2812  switch (code)
2813    {
2814    case REG:
2815    case DEBUG_EXPR:
2816    case VALUE:
2817    case CONST_INT:
2818    case CONST_DOUBLE:
2819    case CONST_FIXED:
2820    case CONST_VECTOR:
2821    case SYMBOL_REF:
2822    case CODE_LABEL:
2823    case PC:
2824    case CC0:
2825      return;
2826
2827    case DEBUG_INSN:
2828    case INSN:
2829    case JUMP_INSN:
2830    case CALL_INSN:
2831    case NOTE:
2832    case LABEL_REF:
2833    case BARRIER:
2834      /* The chain of insns is not being copied.  */
2835      return;
2836
2837    default:
2838      break;
2839    }
2840
2841  RTX_FLAG (x, used) = 1;
2842
2843  format_ptr = GET_RTX_FORMAT (code);
2844  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2845    {
2846      switch (*format_ptr++)
2847	{
2848	case 'e':
2849	  set_used_flags (XEXP (x, i));
2850	  break;
2851
2852	case 'E':
2853	  for (j = 0; j < XVECLEN (x, i); j++)
2854	    set_used_flags (XVECEXP (x, i, j));
2855	  break;
2856	}
2857    }
2858}
2859
2860/* Copy X if necessary so that it won't be altered by changes in OTHER.
2861   Return X or the rtx for the pseudo reg the value of X was copied into.
2862   OTHER must be valid as a SET_DEST.  */
2863
2864rtx
2865make_safe_from (rtx x, rtx other)
2866{
2867  while (1)
2868    switch (GET_CODE (other))
2869      {
2870      case SUBREG:
2871	other = SUBREG_REG (other);
2872	break;
2873      case STRICT_LOW_PART:
2874      case SIGN_EXTEND:
2875      case ZERO_EXTEND:
2876	other = XEXP (other, 0);
2877	break;
2878      default:
2879	goto done;
2880      }
2881 done:
2882  if ((MEM_P (other)
2883       && ! CONSTANT_P (x)
2884       && !REG_P (x)
2885       && GET_CODE (x) != SUBREG)
2886      || (REG_P (other)
2887	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
2888	      || reg_mentioned_p (other, x))))
2889    {
2890      rtx temp = gen_reg_rtx (GET_MODE (x));
2891      emit_move_insn (temp, x);
2892      return temp;
2893    }
2894  return x;
2895}
2896
2897/* Emission of insns (adding them to the doubly-linked list).  */
2898
2899/* Return the first insn of the current sequence or current function.  */
2900
2901rtx
2902get_insns (void)
2903{
2904  return first_insn;
2905}
2906
2907/* Specify a new insn as the first in the chain.  */
2908
2909void
2910set_first_insn (rtx insn)
2911{
2912  gcc_assert (!PREV_INSN (insn));
2913  first_insn = insn;
2914}
2915
2916/* Return the last insn emitted in current sequence or current function.  */
2917
2918rtx
2919get_last_insn (void)
2920{
2921  return last_insn;
2922}
2923
2924/* Specify a new insn as the last in the chain.  */
2925
2926void
2927set_last_insn (rtx insn)
2928{
2929  gcc_assert (!NEXT_INSN (insn));
2930  last_insn = insn;
2931}
2932
2933/* Return the last insn emitted, even if it is in a sequence now pushed.  */
2934
2935rtx
2936get_last_insn_anywhere (void)
2937{
2938  struct sequence_stack *stack;
2939  if (last_insn)
2940    return last_insn;
2941  for (stack = seq_stack; stack; stack = stack->next)
2942    if (stack->last != 0)
2943      return stack->last;
2944  return 0;
2945}
2946
2947/* Return the first nonnote insn emitted in current sequence or current
2948   function.  This routine looks inside SEQUENCEs.  */
2949
2950rtx
2951get_first_nonnote_insn (void)
2952{
2953  rtx insn = first_insn;
2954
2955  if (insn)
2956    {
2957      if (NOTE_P (insn))
2958	for (insn = next_insn (insn);
2959	     insn && NOTE_P (insn);
2960	     insn = next_insn (insn))
2961	  continue;
2962      else
2963	{
2964	  if (NONJUMP_INSN_P (insn)
2965	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2966	    insn = XVECEXP (PATTERN (insn), 0, 0);
2967	}
2968    }
2969
2970  return insn;
2971}
2972
2973/* Return the last nonnote insn emitted in current sequence or current
2974   function.  This routine looks inside SEQUENCEs.  */
2975
2976rtx
2977get_last_nonnote_insn (void)
2978{
2979  rtx insn = last_insn;
2980
2981  if (insn)
2982    {
2983      if (NOTE_P (insn))
2984	for (insn = previous_insn (insn);
2985	     insn && NOTE_P (insn);
2986	     insn = previous_insn (insn))
2987	  continue;
2988      else
2989	{
2990	  if (NONJUMP_INSN_P (insn)
2991	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2992	    insn = XVECEXP (PATTERN (insn), 0,
2993			    XVECLEN (PATTERN (insn), 0) - 1);
2994	}
2995    }
2996
2997  return insn;
2998}
2999
3000/* Return a number larger than any instruction's uid in this function.  */
3001
3002int
3003get_max_uid (void)
3004{
3005  return cur_insn_uid;
3006}
3007
3008/* Return the number of actual (non-debug) insns emitted in this
3009   function.  */
3010
3011int
3012get_max_insn_count (void)
3013{
3014  int n = cur_insn_uid;
3015
3016  /* The table size must be stable across -g, to avoid codegen
3017     differences due to debug insns, and not be affected by
3018     -fmin-insn-uid, to avoid excessive table size and to simplify
3019     debugging of -fcompare-debug failures.  */
3020  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3021    n -= cur_debug_insn_uid;
3022  else
3023    n -= MIN_NONDEBUG_INSN_UID;
3024
3025  return n;
3026}
3027
3028
3029/* Return the next insn.  If it is a SEQUENCE, return the first insn
3030   of the sequence.  */
3031
3032rtx
3033next_insn (rtx insn)
3034{
3035  if (insn)
3036    {
3037      insn = NEXT_INSN (insn);
3038      if (insn && NONJUMP_INSN_P (insn)
3039	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3040	insn = XVECEXP (PATTERN (insn), 0, 0);
3041    }
3042
3043  return insn;
3044}
3045
3046/* Return the previous insn.  If it is a SEQUENCE, return the last insn
3047   of the sequence.  */
3048
3049rtx
3050previous_insn (rtx insn)
3051{
3052  if (insn)
3053    {
3054      insn = PREV_INSN (insn);
3055      if (insn && NONJUMP_INSN_P (insn)
3056	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3057	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3058    }
3059
3060  return insn;
3061}
3062
3063/* Return the next insn after INSN that is not a NOTE.  This routine does not
3064   look inside SEQUENCEs.  */
3065
3066rtx
3067next_nonnote_insn (rtx insn)
3068{
3069  while (insn)
3070    {
3071      insn = NEXT_INSN (insn);
3072      if (insn == 0 || !NOTE_P (insn))
3073	break;
3074    }
3075
3076  return insn;
3077}
3078
3079/* Return the next insn after INSN that is not a NOTE, but stop the
3080   search before we enter another basic block.  This routine does not
3081   look inside SEQUENCEs.  */
3082
3083rtx
3084next_nonnote_insn_bb (rtx insn)
3085{
3086  while (insn)
3087    {
3088      insn = NEXT_INSN (insn);
3089      if (insn == 0 || !NOTE_P (insn))
3090	break;
3091      if (NOTE_INSN_BASIC_BLOCK_P (insn))
3092	return NULL_RTX;
3093    }
3094
3095  return insn;
3096}
3097
3098/* Return the previous insn before INSN that is not a NOTE.  This routine does
3099   not look inside SEQUENCEs.  */
3100
3101rtx
3102prev_nonnote_insn (rtx insn)
3103{
3104  while (insn)
3105    {
3106      insn = PREV_INSN (insn);
3107      if (insn == 0 || !NOTE_P (insn))
3108	break;
3109    }
3110
3111  return insn;
3112}
3113
3114/* Return the previous insn before INSN that is not a NOTE, but stop
3115   the search before we enter another basic block.  This routine does
3116   not look inside SEQUENCEs.  */
3117
3118rtx
3119prev_nonnote_insn_bb (rtx insn)
3120{
3121  while (insn)
3122    {
3123      insn = PREV_INSN (insn);
3124      if (insn == 0 || !NOTE_P (insn))
3125	break;
3126      if (NOTE_INSN_BASIC_BLOCK_P (insn))
3127	return NULL_RTX;
3128    }
3129
3130  return insn;
3131}
3132
3133/* Return the next insn after INSN that is not a DEBUG_INSN.  This
3134   routine does not look inside SEQUENCEs.  */
3135
3136rtx
3137next_nondebug_insn (rtx insn)
3138{
3139  while (insn)
3140    {
3141      insn = NEXT_INSN (insn);
3142      if (insn == 0 || !DEBUG_INSN_P (insn))
3143	break;
3144    }
3145
3146  return insn;
3147}
3148
3149/* Return the previous insn before INSN that is not a DEBUG_INSN.
3150   This routine does not look inside SEQUENCEs.  */
3151
3152rtx
3153prev_nondebug_insn (rtx insn)
3154{
3155  while (insn)
3156    {
3157      insn = PREV_INSN (insn);
3158      if (insn == 0 || !DEBUG_INSN_P (insn))
3159	break;
3160    }
3161
3162  return insn;
3163}
3164
3165/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3166   This routine does not look inside SEQUENCEs.  */
3167
3168rtx
3169next_nonnote_nondebug_insn (rtx insn)
3170{
3171  while (insn)
3172    {
3173      insn = NEXT_INSN (insn);
3174      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3175	break;
3176    }
3177
3178  return insn;
3179}
3180
3181/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3182   This routine does not look inside SEQUENCEs.  */
3183
3184rtx
3185prev_nonnote_nondebug_insn (rtx insn)
3186{
3187  while (insn)
3188    {
3189      insn = PREV_INSN (insn);
3190      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3191	break;
3192    }
3193
3194  return insn;
3195}
3196
3197/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3198   or 0, if there is none.  This routine does not look inside
3199   SEQUENCEs.  */
3200
3201rtx
3202next_real_insn (rtx insn)
3203{
3204  while (insn)
3205    {
3206      insn = NEXT_INSN (insn);
3207      if (insn == 0 || INSN_P (insn))
3208	break;
3209    }
3210
3211  return insn;
3212}
3213
3214/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3215   or 0, if there is none.  This routine does not look inside
3216   SEQUENCEs.  */
3217
3218rtx
3219prev_real_insn (rtx insn)
3220{
3221  while (insn)
3222    {
3223      insn = PREV_INSN (insn);
3224      if (insn == 0 || INSN_P (insn))
3225	break;
3226    }
3227
3228  return insn;
3229}
3230
3231/* Return the last CALL_INSN in the current list, or 0 if there is none.
3232   This routine does not look inside SEQUENCEs.  */
3233
3234rtx
3235last_call_insn (void)
3236{
3237  rtx insn;
3238
3239  for (insn = get_last_insn ();
3240       insn && !CALL_P (insn);
3241       insn = PREV_INSN (insn))
3242    ;
3243
3244  return insn;
3245}
3246
3247/* Return nonzero if INSN really does something.  After reload this
3248   excludes standalone USE and CLOBBER insns.  This predicate does not
3249   look inside SEQUENCEs.  */
3250
3251int
3252active_insn_p (const_rtx insn)
3253{
3254  return (CALL_P (insn) || JUMP_P (insn)
3255	  || (NONJUMP_INSN_P (insn)
3256	      && (! reload_completed
3257		  || (GET_CODE (PATTERN (insn)) != USE
3258		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3259}
3260
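/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */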
3261rtx
3262next_active_insn (rtx insn)
3263{
3264  while (insn)
3265    {
3266      insn = NEXT_INSN (insn);
3267      if (insn == 0 || active_insn_p (insn))
3268	break;
3269    }
3270
3271  return insn;
3272}
3273
3274/* Find the last insn before INSN that really does something.  This routine
3275   does not look inside SEQUENCEs.  After reload this also skips over
3276   standalone USE and CLOBBER insns.  */
3277
3278rtx
3279prev_active_insn (rtx insn)
3280{
3281  while (insn)
3282    {
3283      insn = PREV_INSN (insn);
3284      if (insn == 0 || active_insn_p (insn))
3285	break;
3286    }
3287
3288  return insn;
3289}
3290
3291/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
3292
3293rtx
3294next_label (rtx insn)
3295{
3296  while (insn)
3297    {
3298      insn = NEXT_INSN (insn);
3299      if (insn == 0 || LABEL_P (insn))
3300	break;
3301    }
3302
3303  return insn;
3304}
3305
3306/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */
3307
3308rtx
3309prev_label (rtx insn)
3310{
3311  while (insn)
3312    {
3313      insn = PREV_INSN (insn);
3314      if (insn == 0 || LABEL_P (insn))
3315	break;
3316    }
3317
3318  return insn;
3319}
3320
3321/* Return the last label to mark the same position as LABEL.  Return null
3322   if LABEL itself is null.  */
3323
3324rtx
3325skip_consecutive_labels (rtx label)
3326{
3327  rtx insn;
3328
3329  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3330    if (LABEL_P (insn))
3331      label = insn;
3332
3333  return label;
3334}
3335
3336#ifdef HAVE_cc0
3337/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3338   and REG_CC_USER notes so we can find it.  */
3339
3340void
3341link_cc0_insns (rtx insn)
3342{
3343  rtx user = next_nonnote_insn (insn);
3344
3345  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3346    user = XVECEXP (PATTERN (user), 0, 0);
3347
3348  add_reg_note (user, REG_CC_SETTER, insn);
3349  add_reg_note (insn, REG_CC_USER, user);
3350}
3351
3352/* Return the next insn that uses CC0 after INSN, which is assumed to
3353   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3354   applied to the result of this function should yield INSN).
3355
3356   Normally, this is simply the next insn.  However, if a REG_CC_USER note
3357   is present, it contains the insn that uses CC0.
3358
3359   Return 0 if we can't find the insn.  */
3360
3361rtx
3362next_cc0_user (rtx insn)
3363{
3364  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3365
3366  if (note)
3367    return XEXP (note, 0);
3368
3369  insn = next_nonnote_insn (insn);
3370  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3371    insn = XVECEXP (PATTERN (insn), 0, 0);
3372
3373  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3374    return insn;
3375
3376  return 0;
3377}
3378
3379/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3380   note, it is the previous insn.  */
3381
3382rtx
3383prev_cc0_setter (rtx insn)
3384{
3385  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3386
3387  if (note)
3388    return XEXP (note, 0);
3389
3390  insn = prev_nonnote_insn (insn);
3391  gcc_assert (sets_cc0_p (PATTERN (insn)));
3392
3393  return insn;
3394}
3395#endif
3396
3397#ifdef AUTO_INC_DEC
3398/* Find an RTX_AUTOINC class rtx which matches DATA.  */
3399
3400static int
3401find_auto_inc (rtx *xp, void *data)
3402{
3403  rtx x = *xp;
3404  rtx reg = (rtx) data;
3405
3406  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3407    return 0;
3408
3409  switch (GET_CODE (x))
3410    {
3411      case PRE_DEC:
3412      case PRE_INC:
3413      case POST_DEC:
3414      case POST_INC:
3415      case PRE_MODIFY:
3416      case POST_MODIFY:
3417	if (rtx_equal_p (reg, XEXP (x, 0)))
3418	  return 1;
3419	break;
3420
3421      default:
3422	gcc_unreachable ();
3423    }
3424  return -1;
3425}
3426#endif
3427
3428/* Increment the label uses for all labels present in rtx X.  */
3429
3430static void
3431mark_label_nuses (rtx x)
3432{
3433  enum rtx_code code;
3434  int i, j;
3435  const char *fmt;
3436
3437  code = GET_CODE (x);
3438  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3439    LABEL_NUSES (XEXP (x, 0))++;
3440
3441  fmt = GET_RTX_FORMAT (code);
3442  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3443    {
3444      if (fmt[i] == 'e')
3445	mark_label_nuses (XEXP (x, i));
3446      else if (fmt[i] == 'E')
3447	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3448	  mark_label_nuses (XVECEXP (x, i, j));
3449    }
3450}
3451
3452
3453/* Try splitting insns that can be split for better scheduling.
3454   PAT is the pattern which might split.
3455   TRIAL is the insn providing PAT.
3456   LAST is nonzero if we should return the last insn of the sequence produced.
3457
3458   If this routine succeeds in splitting, it returns the first or last
3459   replacement insn depending on the value of LAST.  Otherwise, it
3460   returns TRIAL.  If the insn to be returned can be split, it will be.  */
3461
3462rtx
3463try_split (rtx pat, rtx trial, int last)
3464{
3465  rtx before = PREV_INSN (trial);
3466  rtx after = NEXT_INSN (trial);
3467  int has_barrier = 0;
3468  rtx note, seq, tem;
3469  int probability;
3470  rtx insn_last, insn;
3471  int njumps = 0;
3472
3473  /* We're not good at redistributing frame information.  */
3474  if (RTX_FRAME_RELATED_P (trial))
3475    return trial;
3476
3477  if (any_condjump_p (trial)
3478      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3479    split_branch_probability = INTVAL (XEXP (note, 0));
3480  probability = split_branch_probability;
3481
3482  seq = split_insns (pat, trial);
3483
3484  split_branch_probability = -1;
3485
3486  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3487     We may need to handle this specially.  */
3488  if (after && BARRIER_P (after))
3489    {
3490      has_barrier = 1;
3491      after = NEXT_INSN (after);
3492    }
3493
3494  if (!seq)
3495    return trial;
3496
3497  /* Avoid infinite loop if any insn of the result matches
3498     the original pattern.  */
3499  insn_last = seq;
3500  while (1)
3501    {
3502      if (INSN_P (insn_last)
3503	  && rtx_equal_p (PATTERN (insn_last), pat))
3504	return trial;
3505      if (!NEXT_INSN (insn_last))
3506	break;
3507      insn_last = NEXT_INSN (insn_last);
3508    }
3509
3510  /* We will be adding the new sequence to the function.  The splitters
3511     may have introduced invalid RTL sharing, so unshare the sequence now.  */
3512  unshare_all_rtl_in_chain (seq);
3513
3514  /* Mark labels.  */
3515  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3516    {
3517      if (JUMP_P (insn))
3518	{
3519	  mark_jump_label (PATTERN (insn), insn, 0);
3520	  njumps++;
3521	  if (probability != -1
3522	      && any_condjump_p (insn)
3523	      && !find_reg_note (insn, REG_BR_PROB, 0))
3524	    {
3525	      /* We can preserve the REG_BR_PROB notes only if exactly
3526		 one jump is created, otherwise the machine description
3527		 is responsible for this step using the
3528		 split_branch_probability variable.  */
3529	      gcc_assert (njumps == 1);
3530	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3531	    }
3532	}
3533    }
3534
3535  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3536     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
3537  if (CALL_P (trial))
3538    {
3539      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3540	if (CALL_P (insn))
3541	  {
3542	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3543	    while (*p)
3544	      p = &XEXP (*p, 1);
3545	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3546	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3547
3548	    /* Update the debug information for the CALL_INSN.  */
3549	    if (flag_enable_icf_debug)
3550	      (*debug_hooks->copy_call_info) (trial, insn);
3551	  }
3552    }
3553
3554  /* Copy notes, particularly those related to the CFG.  */
3555  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3556    {
3557      switch (REG_NOTE_KIND (note))
3558	{
3559	case REG_EH_REGION:
3560	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
3561	  break;
3562
3563	case REG_NORETURN:
3564	case REG_SETJMP:
3565	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3566	    {
3567	      if (CALL_P (insn))
3568		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3569	    }
3570	  break;
3571
3572	case REG_NON_LOCAL_GOTO:
3573	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3574	    {
3575	      if (JUMP_P (insn))
3576		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3577	    }
3578	  break;
3579
3580#ifdef AUTO_INC_DEC
3581	case REG_INC:
3582	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3583	    {
3584	      rtx reg = XEXP (note, 0);
3585	      if (!FIND_REG_INC_NOTE (insn, reg)
3586		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3587		add_reg_note (insn, REG_INC, reg);
3588	    }
3589	  break;
3590#endif
3591
3592	default:
3593	  break;
3594	}
3595    }
3596
3597  /* If there are LABELS inside the split insns, increment the
3598     usage count so we don't delete the label.  */
3599  if (INSN_P (trial))
3600    {
3601      insn = insn_last;
3602      while (insn != NULL_RTX)
3603	{
3604	  /* JUMP_P insns have already been "marked" above.  */
3605	  if (NONJUMP_INSN_P (insn))
3606	    mark_label_nuses (PATTERN (insn));
3607
3608	  insn = PREV_INSN (insn);
3609	}
3610    }
3611
3612  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3613
3614  delete_insn (trial);
3615  if (has_barrier)
3616    emit_barrier_after (tem);
3617
3618  /* Recursively call try_split for each new insn created; by the
3619     time control returns here that insn will be fully split, so
3620     set LAST and continue from the insn after the one returned.
3621     We can't use next_active_insn here since AFTER may be a note.
3622     Ignore deleted insns, which can occur if not optimizing.  */
3623  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3624    if (! INSN_DELETED_P (tem) && INSN_P (tem))
3625      tem = try_split (PATTERN (tem), tem, 1);
3626
3627  /* Return either the first or the last insn, depending on which was
3628     requested.  */
3629  return last
3630    ? (after ? PREV_INSN (after) : last_insn)
3631    : NEXT_INSN (before);
3632}
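
/* As an illustrative sketch (not code from this file; INSN is a
   placeholder), the insn splitting pass applies this to each candidate
   insn roughly as:

	rtx last = try_split (PATTERN (insn), insn, 1);

   asking for the last insn of any replacement sequence, or getting INSN
   itself back if no splitting happened.  */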
3633
3634/* Make and return an INSN rtx, initializing all its slots.
3635   Store PATTERN in the pattern slot.  */
3636
3637rtx
3638make_insn_raw (rtx pattern)
3639{
3640  rtx insn;
3641
3642  insn = rtx_alloc (INSN);
3643
3644  INSN_UID (insn) = cur_insn_uid++;
3645  PATTERN (insn) = pattern;
3646  INSN_CODE (insn) = -1;
3647  REG_NOTES (insn) = NULL;
3648  INSN_LOCATOR (insn) = curr_insn_locator ();
3649  BLOCK_FOR_INSN (insn) = NULL;
3650
3651#ifdef ENABLE_RTL_CHECKING
3652  if (insn
3653      && INSN_P (insn)
3654      && (returnjump_p (insn)
3655	  || (GET_CODE (insn) == SET
3656	      && SET_DEST (insn) == pc_rtx)))
3657    {
3658      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3659      debug_rtx (insn);
3660    }
3661#endif
3662
3663  return insn;
3664}
3665
3666/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
3667
3668rtx
3669make_debug_insn_raw (rtx pattern)
3670{
3671  rtx insn;
3672
3673  insn = rtx_alloc (DEBUG_INSN);
3674  INSN_UID (insn) = cur_debug_insn_uid++;
3675  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3676    INSN_UID (insn) = cur_insn_uid++;
3677
3678  PATTERN (insn) = pattern;
3679  INSN_CODE (insn) = -1;
3680  REG_NOTES (insn) = NULL;
3681  INSN_LOCATOR (insn) = curr_insn_locator ();
3682  BLOCK_FOR_INSN (insn) = NULL;
3683
3684  return insn;
3685}
3686
3687/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3688
3689rtx
3690make_jump_insn_raw (rtx pattern)
3691{
3692  rtx insn;
3693
3694  insn = rtx_alloc (JUMP_INSN);
3695  INSN_UID (insn) = cur_insn_uid++;
3696
3697  PATTERN (insn) = pattern;
3698  INSN_CODE (insn) = -1;
3699  REG_NOTES (insn) = NULL;
3700  JUMP_LABEL (insn) = NULL;
3701  INSN_LOCATOR (insn) = curr_insn_locator ();
3702  BLOCK_FOR_INSN (insn) = NULL;
3703
3704  return insn;
3705}
3706
3707/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3708
3709static rtx
3710make_call_insn_raw (rtx pattern)
3711{
3712  rtx insn;
3713
3714  insn = rtx_alloc (CALL_INSN);
3715  INSN_UID (insn) = cur_insn_uid++;
3716
3717  PATTERN (insn) = pattern;
3718  INSN_CODE (insn) = -1;
3719  REG_NOTES (insn) = NULL;
3720  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3721  INSN_LOCATOR (insn) = curr_insn_locator ();
3722  BLOCK_FOR_INSN (insn) = NULL;
3723
3724  return insn;
3725}
3726
3727/* Add INSN to the end of the doubly-linked list.
3728   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3729
3730void
3731add_insn (rtx insn)
3732{
3733  PREV_INSN (insn) = last_insn;
3734  NEXT_INSN (insn) = 0;
3735
3736  if (NULL != last_insn)
3737    NEXT_INSN (last_insn) = insn;
3738
3739  if (NULL == first_insn)
3740    first_insn = insn;
3741
3742  last_insn = insn;
3743}
3744
3745/* Add INSN into the doubly-linked list after insn AFTER.  This and
3746   the next should be the only functions called to insert an insn once
3747   delay slots have been filled, since only they know how to update a
3748   SEQUENCE.  */
3749
3750void
3751add_insn_after (rtx insn, rtx after, basic_block bb)
3752{
3753  rtx next = NEXT_INSN (after);
3754
3755  gcc_assert (!optimize || !INSN_DELETED_P (after));
3756
3757  NEXT_INSN (insn) = next;
3758  PREV_INSN (insn) = after;
3759
3760  if (next)
3761    {
3762      PREV_INSN (next) = insn;
3763      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3764	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3765    }
3766  else if (last_insn == after)
3767    last_insn = insn;
3768  else
3769    {
3770      struct sequence_stack *stack = seq_stack;
3771      /* Scan all pending sequences too.  */
3772      for (; stack; stack = stack->next)
3773	if (after == stack->last)
3774	  {
3775	    stack->last = insn;
3776	    break;
3777	  }
3778
3779      gcc_assert (stack);
3780    }
3781
3782  if (!BARRIER_P (after)
3783      && !BARRIER_P (insn)
3784      && (bb = BLOCK_FOR_INSN (after)))
3785    {
3786      set_block_for_insn (insn, bb);
3787      if (INSN_P (insn))
3788	df_insn_rescan (insn);
3789      /* If AFTER was the last insn of its block, the new insn
3790	 becomes the last insn of the block as well.  */
3791      if (BB_END (bb) == after
3792	  /* Avoid clobbering of structure when creating new BB.  */
3793	  && !BARRIER_P (insn)
3794	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
3795	BB_END (bb) = insn;
3796    }
3797
3798  NEXT_INSN (after) = insn;
3799  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3800    {
3801      rtx sequence = PATTERN (after);
3802      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3803    }
3804}
3805
3806/* Add INSN into the doubly-linked list before insn BEFORE.  This and
3807   the previous should be the only functions called to insert an insn
3808   once delay slots have been filled, since only they know how to
3809   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
3810   BB from BEFORE.  */
3811
3812void
3813add_insn_before (rtx insn, rtx before, basic_block bb)
3814{
3815  rtx prev = PREV_INSN (before);
3816
3817  gcc_assert (!optimize || !INSN_DELETED_P (before));
3818
3819  PREV_INSN (insn) = prev;
3820  NEXT_INSN (insn) = before;
3821
3822  if (prev)
3823    {
3824      NEXT_INSN (prev) = insn;
3825      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3826	{
3827	  rtx sequence = PATTERN (prev);
3828	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3829	}
3830    }
3831  else if (first_insn == before)
3832    first_insn = insn;
3833  else
3834    {
3835      struct sequence_stack *stack = seq_stack;
3836      /* Scan all pending sequences too.  */
3837      for (; stack; stack = stack->next)
3838	if (before == stack->first)
3839	  {
3840	    stack->first = insn;
3841	    break;
3842	  }
3843
3844      gcc_assert (stack);
3845    }
3846
3847  if (!bb
3848      && !BARRIER_P (before)
3849      && !BARRIER_P (insn))
3850    bb = BLOCK_FOR_INSN (before);
3851
3852  if (bb)
3853    {
3854      set_block_for_insn (insn, bb);
3855      if (INSN_P (insn))
3856	df_insn_rescan (insn);
3857      /* Should not happen as first in the BB is always either NOTE or
3858	 LABEL.  */
3859      gcc_assert (BB_HEAD (bb) != insn
3860		  /* Avoid clobbering of structure when creating new BB.  */
3861		  || BARRIER_P (insn)
3862		  || NOTE_INSN_BASIC_BLOCK_P (insn));
3863    }
3864
3865  PREV_INSN (before) = insn;
3866  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3867    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3868}
3869
3870
3871/* Replace INSN with a NOTE_INSN_DELETED note.  */
3872
3873void
3874set_insn_deleted (rtx insn)
3875{
3876  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3877  PUT_CODE (insn, NOTE);
3878  NOTE_KIND (insn) = NOTE_INSN_DELETED;
3879}
3880
3881
3882/* Remove an insn from its doubly-linked list.  This function knows how
3883   to handle sequences.  */
3884void
3885remove_insn (rtx insn)
3886{
3887  rtx next = NEXT_INSN (insn);
3888  rtx prev = PREV_INSN (insn);
3889  basic_block bb;
3890
3891  /* Later in the code, the block will be marked dirty.  */
3892  df_insn_delete (NULL, INSN_UID (insn));
3893
3894  if (prev)
3895    {
3896      NEXT_INSN (prev) = next;
3897      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3898	{
3899	  rtx sequence = PATTERN (prev);
3900	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3901	}
3902    }
3903  else if (first_insn == insn)
3904    first_insn = next;
3905  else
3906    {
3907      struct sequence_stack *stack = seq_stack;
3908      /* Scan all pending sequences too.  */
3909      for (; stack; stack = stack->next)
3910	if (insn == stack->first)
3911	  {
3912	    stack->first = next;
3913	    break;
3914	  }
3915
3916      gcc_assert (stack);
3917    }
3918
3919  if (next)
3920    {
3921      PREV_INSN (next) = prev;
3922      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3923	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3924    }
3925  else if (last_insn == insn)
3926    last_insn = prev;
3927  else
3928    {
3929      struct sequence_stack *stack = seq_stack;
3930      /* Scan all pending sequences too.  */
3931      for (; stack; stack = stack->next)
3932	if (insn == stack->last)
3933	  {
3934	    stack->last = prev;
3935	    break;
3936	  }
3937
3938      gcc_assert (stack);
3939    }
3940  if (!BARRIER_P (insn)
3941      && (bb = BLOCK_FOR_INSN (insn)))
3942    {
3943      if (INSN_P (insn))
3944	df_set_bb_dirty (bb);
3945      if (BB_HEAD (bb) == insn)
3946	{
3947	  /* Never ever delete the basic block note without deleting the
3948	     whole basic block.  */
3949	  gcc_assert (!NOTE_P (insn));
3950	  BB_HEAD (bb) = next;
3951	}
3952      if (BB_END (bb) == insn)
3953	BB_END (bb) = prev;
3954    }
3955}
3956
3957/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
3958
3959void
3960add_function_usage_to (rtx call_insn, rtx call_fusage)
3961{
3962  gcc_assert (call_insn && CALL_P (call_insn));
3963
3964  /* Put the register usage information on the CALL.  If there is already
3965     some usage information, put ours at the end.  */
3966  if (CALL_INSN_FUNCTION_USAGE (call_insn))
3967    {
3968      rtx link;
3969
3970      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3971	   link = XEXP (link, 1))
3972	;
3973
3974      XEXP (link, 1) = call_fusage;
3975    }
3976  else
3977    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3978}
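
/* A minimal, hypothetical sketch of a caller: to record that a call
   uses the first hard register as an argument, one might build a
   one-element fusage list and attach it like so

	fusage = gen_rtx_EXPR_LIST (VOIDmode,
				    gen_rtx_USE (VOIDmode,
						 gen_rtx_REG (Pmode, 0)),
				    NULL_RTX);
	add_function_usage_to (call_insn, fusage);

   where call_insn is an existing CALL_INSN and register number 0 is
   only a placeholder for whatever hard register the target actually
   passes the argument in.  */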
3979
3980/* Delete all insns made since FROM.
3981   FROM becomes the new last instruction.  */
3982
3983void
3984delete_insns_since (rtx from)
3985{
3986  if (from == 0)
3987    first_insn = 0;
3988  else
3989    NEXT_INSN (from) = 0;
3990  last_insn = from;
3991}
3992
3993/* This function is deprecated; please use sequences instead.
3994
3995   Move a consecutive bunch of insns to a different place in the chain.
3996   The insns to be moved are those between FROM and TO.
3997   They are moved to a new position after the insn AFTER.
3998   AFTER must not be FROM or TO or any insn in between.
3999
4000   This function does not know about SEQUENCEs and hence should not be
4001   called after delay-slot filling has been done.  */
4002
4003void
4004reorder_insns_nobb (rtx from, rtx to, rtx after)
4005{
4006  /* Splice this bunch out of where it is now.  */
4007  if (PREV_INSN (from))
4008    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4009  if (NEXT_INSN (to))
4010    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4011  if (last_insn == to)
4012    last_insn = PREV_INSN (from);
4013  if (first_insn == from)
4014    first_insn = NEXT_INSN (to);
4015
4016  /* Make the new neighbors point to it and it to them.  */
4017  if (NEXT_INSN (after))
4018    PREV_INSN (NEXT_INSN (after)) = to;
4019
4020  NEXT_INSN (to) = NEXT_INSN (after);
4021  PREV_INSN (from) = after;
4022  NEXT_INSN (after) = from;
4023  if (after == last_insn)
4024    last_insn = to;
4025}
4026
4027/* Same as reorder_insns_nobb, but also update basic block boundaries.  */
4028void
4029reorder_insns (rtx from, rtx to, rtx after)
4030{
4031  rtx prev = PREV_INSN (from);
4032  basic_block bb, bb2;
4033
4034  reorder_insns_nobb (from, to, after);
4035
4036  if (!BARRIER_P (after)
4037      && (bb = BLOCK_FOR_INSN (after)))
4038    {
4039      rtx x;
4040      df_set_bb_dirty (bb);
4041
4042      if (!BARRIER_P (from)
4043	  && (bb2 = BLOCK_FOR_INSN (from)))
4044	{
4045	  if (BB_END (bb2) == to)
4046	    BB_END (bb2) = prev;
4047	  df_set_bb_dirty (bb2);
4048	}
4049
4050      if (BB_END (bb) == after)
4051	BB_END (bb) = to;
4052
4053      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4054	if (!BARRIER_P (x))
4055	  df_insn_change_bb (x, bb);
4056    }
4057}
4058
4059
4060/* Emit insn(s) of given code and pattern
4061   at a specified place within the doubly-linked list.
4062
4063   All of the emit_foo global entry points accept an object
4064   X which is either an insn list or a PATTERN of a single
4065   instruction.
4066
4067   There are thus a few canonical ways to generate code and
4068   emit it at a specific place in the instruction stream.  For
4069   example, suppose we would like to emit some instructions
4070   before an existing instruction SPOT.  We might do it
4071   like this:
4072
4073	start_sequence ();
4074	... emit the new instructions ...
4075	insns_head = get_insns ();
4076	end_sequence ();
4077
4078	emit_insn_before (insns_head, SPOT);
4079
4080   It used to be common to generate SEQUENCE rtl instead, but that
4081   is a relic of the past which no longer occurs.  The reason is that
4082   SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4083   generated would almost certainly die right after it was created.  */
4084
4085/* Make X be output before the instruction BEFORE.  */
4086
4087rtx
4088emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4089{
4090  rtx last = before;
4091  rtx insn;
4092
4093  gcc_assert (before);
4094
4095  if (x == NULL_RTX)
4096    return last;
4097
4098  switch (GET_CODE (x))
4099    {
4100    case DEBUG_INSN:
4101    case INSN:
4102    case JUMP_INSN:
4103    case CALL_INSN:
4104    case CODE_LABEL:
4105    case BARRIER:
4106    case NOTE:
4107      insn = x;
4108      while (insn)
4109	{
4110	  rtx next = NEXT_INSN (insn);
4111	  add_insn_before (insn, before, bb);
4112	  last = insn;
4113	  insn = next;
4114	}
4115      break;
4116
4117#ifdef ENABLE_RTL_CHECKING
4118    case SEQUENCE:
4119      gcc_unreachable ();
4120      break;
4121#endif
4122
4123    default:
4124      last = make_insn_raw (x);
4125      add_insn_before (last, before, bb);
4126      break;
4127    }
4128
4129  return last;
4130}
4131
4132/* Make an instruction with body X and code JUMP_INSN
4133   and output it before the instruction BEFORE.  */
4134
4135rtx
4136emit_jump_insn_before_noloc (rtx x, rtx before)
4137{
4138  rtx insn, last = NULL_RTX;
4139
4140  gcc_assert (before);
4141
4142  switch (GET_CODE (x))
4143    {
4144    case DEBUG_INSN:
4145    case INSN:
4146    case JUMP_INSN:
4147    case CALL_INSN:
4148    case CODE_LABEL:
4149    case BARRIER:
4150    case NOTE:
4151      insn = x;
4152      while (insn)
4153	{
4154	  rtx next = NEXT_INSN (insn);
4155	  add_insn_before (insn, before, NULL);
4156	  last = insn;
4157	  insn = next;
4158	}
4159      break;
4160
4161#ifdef ENABLE_RTL_CHECKING
4162    case SEQUENCE:
4163      gcc_unreachable ();
4164      break;
4165#endif
4166
4167    default:
4168      last = make_jump_insn_raw (x);
4169      add_insn_before (last, before, NULL);
4170      break;
4171    }
4172
4173  return last;
4174}
4175
4176/* Make an instruction with body X and code CALL_INSN
4177   and output it before the instruction BEFORE.  */
4178
4179rtx
4180emit_call_insn_before_noloc (rtx x, rtx before)
4181{
4182  rtx last = NULL_RTX, insn;
4183
4184  gcc_assert (before);
4185
4186  switch (GET_CODE (x))
4187    {
4188    case DEBUG_INSN:
4189    case INSN:
4190    case JUMP_INSN:
4191    case CALL_INSN:
4192    case CODE_LABEL:
4193    case BARRIER:
4194    case NOTE:
4195      insn = x;
4196      while (insn)
4197	{
4198	  rtx next = NEXT_INSN (insn);
4199	  add_insn_before (insn, before, NULL);
4200	  last = insn;
4201	  insn = next;
4202	}
4203      break;
4204
4205#ifdef ENABLE_RTL_CHECKING
4206    case SEQUENCE:
4207      gcc_unreachable ();
4208      break;
4209#endif
4210
4211    default:
4212      last = make_call_insn_raw (x);
4213      add_insn_before (last, before, NULL);
4214      break;
4215    }
4216
4217  return last;
4218}
4219
4220/* Make an instruction with body X and code DEBUG_INSN
4221   and output it before the instruction BEFORE.  */
4222
4223rtx
4224emit_debug_insn_before_noloc (rtx x, rtx before)
4225{
4226  rtx last = NULL_RTX, insn;
4227
4228  gcc_assert (before);
4229
4230  switch (GET_CODE (x))
4231    {
4232    case DEBUG_INSN:
4233    case INSN:
4234    case JUMP_INSN:
4235    case CALL_INSN:
4236    case CODE_LABEL:
4237    case BARRIER:
4238    case NOTE:
4239      insn = x;
4240      while (insn)
4241	{
4242	  rtx next = NEXT_INSN (insn);
4243	  add_insn_before (insn, before, NULL);
4244	  last = insn;
4245	  insn = next;
4246	}
4247      break;
4248
4249#ifdef ENABLE_RTL_CHECKING
4250    case SEQUENCE:
4251      gcc_unreachable ();
4252      break;
4253#endif
4254
4255    default:
4256      last = make_debug_insn_raw (x);
4257      add_insn_before (last, before, NULL);
4258      break;
4259    }
4260
4261  return last;
4262}
4263
4264/* Make an insn of code BARRIER
4265   and output it before the insn BEFORE.  */
4266
4267rtx
4268emit_barrier_before (rtx before)
4269{
4270  rtx insn = rtx_alloc (BARRIER);
4271
4272  INSN_UID (insn) = cur_insn_uid++;
4273
4274  add_insn_before (insn, before, NULL);
4275  return insn;
4276}
4277
4278/* Emit the label LABEL before the insn BEFORE.  */
4279
4280rtx
4281emit_label_before (rtx label, rtx before)
4282{
4283  /* This can be called twice for the same label as a result of the
4284     confusion that follows a syntax error!  So make it harmless.  */
4285  if (INSN_UID (label) == 0)
4286    {
4287      INSN_UID (label) = cur_insn_uid++;
4288      add_insn_before (label, before, NULL);
4289    }
4290
4291  return label;
4292}
4293
4294/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4295
4296rtx
4297emit_note_before (enum insn_note subtype, rtx before)
4298{
4299  rtx note = rtx_alloc (NOTE);
4300  INSN_UID (note) = cur_insn_uid++;
4301  NOTE_KIND (note) = subtype;
4302  BLOCK_FOR_INSN (note) = NULL;
4303  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4304
4305  add_insn_before (note, before, NULL);
4306  return note;
4307}
4308
4309/* Helper for emit_insn_after, handles lists of instructions
4310   efficiently.  */
4311
4312static rtx
4313emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4314{
4315  rtx last;
4316  rtx after_after;
4317  if (!bb && !BARRIER_P (after))
4318    bb = BLOCK_FOR_INSN (after);
4319
4320  if (bb)
4321    {
4322      df_set_bb_dirty (bb);
4323      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4324	if (!BARRIER_P (last))
4325	  {
4326	    set_block_for_insn (last, bb);
4327	    df_insn_rescan (last);
4328	  }
4329      if (!BARRIER_P (last))
4330	{
4331	  set_block_for_insn (last, bb);
4332	  df_insn_rescan (last);
4333	}
4334      if (BB_END (bb) == after)
4335	BB_END (bb) = last;
4336    }
4337  else
4338    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4339      continue;
4340
4341  after_after = NEXT_INSN (after);
4342
4343  NEXT_INSN (after) = first;
4344  PREV_INSN (first) = after;
4345  NEXT_INSN (last) = after_after;
4346  if (after_after)
4347    PREV_INSN (after_after) = last;
4348
4349  if (after == last_insn)
4350    last_insn = last;
4351
4352  return last;
4353}
4354
4355/* Make X be output after the insn AFTER and set its basic block.
4356   If BB is NULL, an attempt is made to infer the BB from AFTER.  */
4357
4358rtx
4359emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4360{
4361  rtx last = after;
4362
4363  gcc_assert (after);
4364
4365  if (x == NULL_RTX)
4366    return last;
4367
4368  switch (GET_CODE (x))
4369    {
4370    case DEBUG_INSN:
4371    case INSN:
4372    case JUMP_INSN:
4373    case CALL_INSN:
4374    case CODE_LABEL:
4375    case BARRIER:
4376    case NOTE:
4377      last = emit_insn_after_1 (x, after, bb);
4378      break;
4379
4380#ifdef ENABLE_RTL_CHECKING
4381    case SEQUENCE:
4382      gcc_unreachable ();
4383      break;
4384#endif
4385
4386    default:
4387      last = make_insn_raw (x);
4388      add_insn_after (last, after, bb);
4389      break;
4390    }
4391
4392  return last;
4393}
4394
4395
4396/* Make an insn of code JUMP_INSN with body X
4397   and output it after the insn AFTER.  */
4398
4399rtx
4400emit_jump_insn_after_noloc (rtx x, rtx after)
4401{
4402  rtx last;
4403
4404  gcc_assert (after);
4405
4406  switch (GET_CODE (x))
4407    {
4408    case DEBUG_INSN:
4409    case INSN:
4410    case JUMP_INSN:
4411    case CALL_INSN:
4412    case CODE_LABEL:
4413    case BARRIER:
4414    case NOTE:
4415      last = emit_insn_after_1 (x, after, NULL);
4416      break;
4417
4418#ifdef ENABLE_RTL_CHECKING
4419    case SEQUENCE:
4420      gcc_unreachable ();
4421      break;
4422#endif
4423
4424    default:
4425      last = make_jump_insn_raw (x);
4426      add_insn_after (last, after, NULL);
4427      break;
4428    }
4429
4430  return last;
4431}
4432
4433/* Make an instruction with body X and code CALL_INSN
4434   and output it after the instruction AFTER.  */
4435
4436rtx
4437emit_call_insn_after_noloc (rtx x, rtx after)
4438{
4439  rtx last;
4440
4441  gcc_assert (after);
4442
4443  switch (GET_CODE (x))
4444    {
4445    case DEBUG_INSN:
4446    case INSN:
4447    case JUMP_INSN:
4448    case CALL_INSN:
4449    case CODE_LABEL:
4450    case BARRIER:
4451    case NOTE:
4452      last = emit_insn_after_1 (x, after, NULL);
4453      break;
4454
4455#ifdef ENABLE_RTL_CHECKING
4456    case SEQUENCE:
4457      gcc_unreachable ();
4458      break;
4459#endif
4460
4461    default:
4462      last = make_call_insn_raw (x);
4463      add_insn_after (last, after, NULL);
4464      break;
4465    }
4466
4467  return last;
4468}
4469
4470/* Make an instruction with body X and code DEBUG_INSN
4471   and output it after the instruction AFTER.  */
4472
4473rtx
4474emit_debug_insn_after_noloc (rtx x, rtx after)
4475{
4476  rtx last;
4477
4478  gcc_assert (after);
4479
4480  switch (GET_CODE (x))
4481    {
4482    case DEBUG_INSN:
4483    case INSN:
4484    case JUMP_INSN:
4485    case CALL_INSN:
4486    case CODE_LABEL:
4487    case BARRIER:
4488    case NOTE:
4489      last = emit_insn_after_1 (x, after, NULL);
4490      break;
4491
4492#ifdef ENABLE_RTL_CHECKING
4493    case SEQUENCE:
4494      gcc_unreachable ();
4495      break;
4496#endif
4497
4498    default:
4499      last = make_debug_insn_raw (x);
4500      add_insn_after (last, after, NULL);
4501      break;
4502    }
4503
4504  return last;
4505}
4506
4507/* Make an insn of code BARRIER
4508   and output it after the insn AFTER.  */
4509
4510rtx
4511emit_barrier_after (rtx after)
4512{
4513  rtx insn = rtx_alloc (BARRIER);
4514
4515  INSN_UID (insn) = cur_insn_uid++;
4516
4517  add_insn_after (insn, after, NULL);
4518  return insn;
4519}
4520
4521/* Emit the label LABEL after the insn AFTER.  */
4522
4523rtx
4524emit_label_after (rtx label, rtx after)
4525{
4526  /* This can be called twice for the same label
4527     as a result of the confusion that follows a syntax error!
4528     So make it harmless.  */
4529  if (INSN_UID (label) == 0)
4530    {
4531      INSN_UID (label) = cur_insn_uid++;
4532      add_insn_after (label, after, NULL);
4533    }
4534
4535  return label;
4536}
4537
4538/* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4539
4540rtx
4541emit_note_after (enum insn_note subtype, rtx after)
4542{
4543  rtx note = rtx_alloc (NOTE);
4544  INSN_UID (note) = cur_insn_uid++;
4545  NOTE_KIND (note) = subtype;
4546  BLOCK_FOR_INSN (note) = NULL;
4547  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4548  add_insn_after (note, after, NULL);
4549  return note;
4550}
4551
4552/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4553rtx
4554emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4555{
4556  rtx last = emit_insn_after_noloc (pattern, after, NULL);
4557
4558  if (pattern == NULL_RTX || !loc)
4559    return last;
4560
4561  after = NEXT_INSN (after);
4562  while (1)
4563    {
4564      if (active_insn_p (after) && !INSN_LOCATOR (after))
4565	INSN_LOCATOR (after) = loc;
4566      if (after == last)
4567	break;
4568      after = NEXT_INSN (after);
4569    }
4570  return last;
4571}
4572
4573/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4574rtx
4575emit_insn_after (rtx pattern, rtx after)
4576{
4577  rtx prev = after;
4578
4579  while (DEBUG_INSN_P (prev))
4580    prev = PREV_INSN (prev);
4581
4582  if (INSN_P (prev))
4583    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4584  else
4585    return emit_insn_after_noloc (pattern, after, NULL);
4586}
4587
4588/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4589rtx
4590emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4591{
4592  rtx last = emit_jump_insn_after_noloc (pattern, after);
4593
4594  if (pattern == NULL_RTX || !loc)
4595    return last;
4596
4597  after = NEXT_INSN (after);
4598  while (1)
4599    {
4600      if (active_insn_p (after) && !INSN_LOCATOR (after))
4601	INSN_LOCATOR (after) = loc;
4602      if (after == last)
4603	break;
4604      after = NEXT_INSN (after);
4605    }
4606  return last;
4607}
4608
4609/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4610rtx
4611emit_jump_insn_after (rtx pattern, rtx after)
4612{
4613  rtx prev = after;
4614
4615  while (DEBUG_INSN_P (prev))
4616    prev = PREV_INSN (prev);
4617
4618  if (INSN_P (prev))
4619    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4620  else
4621    return emit_jump_insn_after_noloc (pattern, after);
4622}
4623
4624/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4625rtx
4626emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4627{
4628  rtx last = emit_call_insn_after_noloc (pattern, after);
4629
4630  if (pattern == NULL_RTX || !loc)
4631    return last;
4632
4633  after = NEXT_INSN (after);
4634  while (1)
4635    {
4636      if (active_insn_p (after) && !INSN_LOCATOR (after))
4637	INSN_LOCATOR (after) = loc;
4638      if (after == last)
4639	break;
4640      after = NEXT_INSN (after);
4641    }
4642  return last;
4643}
4644
4645/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4646rtx
4647emit_call_insn_after (rtx pattern, rtx after)
4648{
4649  rtx prev = after;
4650
4651  while (DEBUG_INSN_P (prev))
4652    prev = PREV_INSN (prev);
4653
4654  if (INSN_P (prev))
4655    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4656  else
4657    return emit_call_insn_after_noloc (pattern, after);
4658}
4659
4660/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4661rtx
4662emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4663{
4664  rtx last = emit_debug_insn_after_noloc (pattern, after);
4665
4666  if (pattern == NULL_RTX || !loc)
4667    return last;
4668
4669  after = NEXT_INSN (after);
4670  while (1)
4671    {
4672      if (active_insn_p (after) && !INSN_LOCATOR (after))
4673	INSN_LOCATOR (after) = loc;
4674      if (after == last)
4675	break;
4676      after = NEXT_INSN (after);
4677    }
4678  return last;
4679}
4680
4681/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4682rtx
4683emit_debug_insn_after (rtx pattern, rtx after)
4684{
4685  if (INSN_P (after))
4686    return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4687  else
4688    return emit_debug_insn_after_noloc (pattern, after);
4689}
4690
4691/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4692rtx
4693emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4694{
4695  rtx first = PREV_INSN (before);
4696  rtx last = emit_insn_before_noloc (pattern, before, NULL);
4697
4698  if (pattern == NULL_RTX || !loc)
4699    return last;
4700
4701  if (!first)
4702    first = get_insns ();
4703  else
4704    first = NEXT_INSN (first);
4705  while (1)
4706    {
4707      if (active_insn_p (first) && !INSN_LOCATOR (first))
4708	INSN_LOCATOR (first) = loc;
4709      if (first == last)
4710	break;
4711      first = NEXT_INSN (first);
4712    }
4713  return last;
4714}
4715
4716/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4717rtx
4718emit_insn_before (rtx pattern, rtx before)
4719{
4720  rtx next = before;
4721
4722  while (DEBUG_INSN_P (next))
4723    next = PREV_INSN (next);
4724
4725  if (INSN_P (next))
4726    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4727  else
4728    return emit_insn_before_noloc (pattern, before, NULL);
4729}
4730
4731/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4732rtx
4733emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4734{
4735  rtx first = PREV_INSN (before);
4736  rtx last = emit_jump_insn_before_noloc (pattern, before);
4737
4738  if (pattern == NULL_RTX)
4739    return last;
4740
4741  first = NEXT_INSN (first);
4742  while (1)
4743    {
4744      if (active_insn_p (first) && !INSN_LOCATOR (first))
4745	INSN_LOCATOR (first) = loc;
4746      if (first == last)
4747	break;
4748      first = NEXT_INSN (first);
4749    }
4750  return last;
4751}
4752
4753/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4754rtx
4755emit_jump_insn_before (rtx pattern, rtx before)
4756{
4757  rtx next = before;
4758
4759  while (DEBUG_INSN_P (next))
4760    next = PREV_INSN (next);
4761
4762  if (INSN_P (next))
4763    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4764  else
4765    return emit_jump_insn_before_noloc (pattern, before);
4766}
4767
4768/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4769rtx
4770emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4771{
4772  rtx first = PREV_INSN (before);
4773  rtx last = emit_call_insn_before_noloc (pattern, before);
4774
4775  if (pattern == NULL_RTX)
4776    return last;
4777
4778  first = NEXT_INSN (first);
4779  while (1)
4780    {
4781      if (active_insn_p (first) && !INSN_LOCATOR (first))
4782	INSN_LOCATOR (first) = loc;
4783      if (first == last)
4784	break;
4785      first = NEXT_INSN (first);
4786    }
4787  return last;
4788}
4789
4790/* Like emit_call_insn_before_noloc,
4791   but set INSN_LOCATOR according to BEFORE.  */
4792rtx
4793emit_call_insn_before (rtx pattern, rtx before)
4794{
4795  rtx next = before;
4796
4797  while (DEBUG_INSN_P (next))
4798    next = PREV_INSN (next);
4799
4800  if (INSN_P (next))
4801    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4802  else
4803    return emit_call_insn_before_noloc (pattern, before);
4804}
4805
4806/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4807rtx
4808emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4809{
4810  rtx first = PREV_INSN (before);
4811  rtx last = emit_debug_insn_before_noloc (pattern, before);
4812
4813  if (pattern == NULL_RTX)
4814    return last;
4815
4816  first = NEXT_INSN (first);
4817  while (1)
4818    {
4819      if (active_insn_p (first) && !INSN_LOCATOR (first))
4820	INSN_LOCATOR (first) = loc;
4821      if (first == last)
4822	break;
4823      first = NEXT_INSN (first);
4824    }
4825  return last;
4826}
4827
4828/* Like emit_debug_insn_before_noloc,
4829   but set INSN_LOCATOR according to BEFORE.  */
4830rtx
4831emit_debug_insn_before (rtx pattern, rtx before)
4832{
4833  if (INSN_P (before))
4834    return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4835  else
4836    return emit_debug_insn_before_noloc (pattern, before);
4837}
4838
4839/* Take X and emit it at the end of the doubly-linked
4840   INSN list.
4841
4842   Returns the last insn emitted.  */
4843
4844rtx
4845emit_insn (rtx x)
4846{
4847  rtx last = last_insn;
4848  rtx insn;
4849
4850  if (x == NULL_RTX)
4851    return last;
4852
4853  switch (GET_CODE (x))
4854    {
4855    case DEBUG_INSN:
4856    case INSN:
4857    case JUMP_INSN:
4858    case CALL_INSN:
4859    case CODE_LABEL:
4860    case BARRIER:
4861    case NOTE:
4862      insn = x;
4863      while (insn)
4864	{
4865	  rtx next = NEXT_INSN (insn);
4866	  add_insn (insn);
4867	  last = insn;
4868	  insn = next;
4869	}
4870      break;
4871
4872#ifdef ENABLE_RTL_CHECKING
4873    case SEQUENCE:
4874      gcc_unreachable ();
4875      break;
4876#endif
4877
4878    default:
4879      last = make_insn_raw (x);
4880      add_insn (last);
4881      break;
4882    }
4883
4884  return last;
4885}
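
/* A minimal illustration (hypothetical, not from a real caller): to
   append a simple register-to-register move to the current chain one
   could write

	emit_insn (gen_rtx_SET (VOIDmode, dest_reg, src_reg));

   where dest_reg and src_reg stand for REG rtxes the caller already
   has; portable code would usually reach this through a helper such
   as emit_move_insn instead of building the SET by hand.  */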
4886
4887/* Make an insn of code DEBUG_INSN with pattern X
4888   and add it to the end of the doubly-linked list.  */
4889
4890rtx
4891emit_debug_insn (rtx x)
4892{
4893  rtx last = last_insn;
4894  rtx insn;
4895
4896  if (x == NULL_RTX)
4897    return last;
4898
4899  switch (GET_CODE (x))
4900    {
4901    case DEBUG_INSN:
4902    case INSN:
4903    case JUMP_INSN:
4904    case CALL_INSN:
4905    case CODE_LABEL:
4906    case BARRIER:
4907    case NOTE:
4908      insn = x;
4909      while (insn)
4910	{
4911	  rtx next = NEXT_INSN (insn);
4912	  add_insn (insn);
4913	  last = insn;
4914	  insn = next;
4915	}
4916      break;
4917
4918#ifdef ENABLE_RTL_CHECKING
4919    case SEQUENCE:
4920      gcc_unreachable ();
4921      break;
4922#endif
4923
4924    default:
4925      last = make_debug_insn_raw (x);
4926      add_insn (last);
4927      break;
4928    }
4929
4930  return last;
4931}
4932
4933/* Make an insn of code JUMP_INSN with pattern X
4934   and add it to the end of the doubly-linked list.  */
4935
4936rtx
4937emit_jump_insn (rtx x)
4938{
4939  rtx last = NULL_RTX, insn;
4940
4941  switch (GET_CODE (x))
4942    {
4943    case DEBUG_INSN:
4944    case INSN:
4945    case JUMP_INSN:
4946    case CALL_INSN:
4947    case CODE_LABEL:
4948    case BARRIER:
4949    case NOTE:
4950      insn = x;
4951      while (insn)
4952	{
4953	  rtx next = NEXT_INSN (insn);
4954	  add_insn (insn);
4955	  last = insn;
4956	  insn = next;
4957	}
4958      break;
4959
4960#ifdef ENABLE_RTL_CHECKING
4961    case SEQUENCE:
4962      gcc_unreachable ();
4963      break;
4964#endif
4965
4966    default:
4967      last = make_jump_insn_raw (x);
4968      add_insn (last);
4969      break;
4970    }
4971
4972  return last;
4973}
4974
4975/* Make an insn of code CALL_INSN with pattern X
4976   and add it to the end of the doubly-linked list.  */
4977
4978rtx
4979emit_call_insn (rtx x)
4980{
4981  rtx insn;
4982
4983  switch (GET_CODE (x))
4984    {
4985    case DEBUG_INSN:
4986    case INSN:
4987    case JUMP_INSN:
4988    case CALL_INSN:
4989    case CODE_LABEL:
4990    case BARRIER:
4991    case NOTE:
4992      insn = emit_insn (x);
4993      break;
4994
4995#ifdef ENABLE_RTL_CHECKING
4996    case SEQUENCE:
4997      gcc_unreachable ();
4998      break;
4999#endif
5000
5001    default:
5002      insn = make_call_insn_raw (x);
5003      add_insn (insn);
5004      break;
5005    }
5006
5007  return insn;
5008}
5009
5010/* Add the label LABEL to the end of the doubly-linked list.  */
5011
5012rtx
5013emit_label (rtx label)
5014{
5015  /* This can be called twice for the same label
5016     as a result of the confusion that follows a syntax error!
5017     So make it harmless.  */
5018  if (INSN_UID (label) == 0)
5019    {
5020      INSN_UID (label) = cur_insn_uid++;
5021      add_insn (label);
5022    }
5023  return label;
5024}
5025
5026/* Make an insn of code BARRIER
5027   and add it to the end of the doubly-linked list.  */
5028
5029rtx
5030emit_barrier (void)
5031{
5032  rtx barrier = rtx_alloc (BARRIER);
5033  INSN_UID (barrier) = cur_insn_uid++;
5034  add_insn (barrier);
5035  return barrier;
5036}
5037
5038/* Emit a copy of note ORIG.  */
5039
5040rtx
5041emit_note_copy (rtx orig)
5042{
5043  rtx note;
5044
5045  note = rtx_alloc (NOTE);
5046
5047  INSN_UID (note) = cur_insn_uid++;
5048  NOTE_DATA (note) = NOTE_DATA (orig);
5049  NOTE_KIND (note) = NOTE_KIND (orig);
5050  BLOCK_FOR_INSN (note) = NULL;
5051  add_insn (note);
5052
5053  return note;
5054}
5055
5056/* Make an insn of code NOTE with note kind KIND
5057   and add it to the end of the doubly-linked list.  */
5058
5059rtx
5060emit_note (enum insn_note kind)
5061{
5062  rtx note;
5063
5064  note = rtx_alloc (NOTE);
5065  INSN_UID (note) = cur_insn_uid++;
5066  NOTE_KIND (note) = kind;
5067  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
5068  BLOCK_FOR_INSN (note) = NULL;
5069  add_insn (note);
5070  return note;
5071}
5072
5073/* Emit a clobber of lvalue X.  */
5074
5075rtx
5076emit_clobber (rtx x)
5077{
5078  /* CONCATs should not appear in the insn stream.  */
5079  if (GET_CODE (x) == CONCAT)
5080    {
5081      emit_clobber (XEXP (x, 0));
5082      return emit_clobber (XEXP (x, 1));
5083    }
5084  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5085}
5086
5087/* Return a sequence of insns to clobber lvalue X.  */
5088
5089rtx
5090gen_clobber (rtx x)
5091{
5092  rtx seq;
5093
5094  start_sequence ();
5095  emit_clobber (x);
5096  seq = get_insns ();
5097  end_sequence ();
5098  return seq;
5099}
5100
5101/* Emit a use of rvalue X.  */
5102
5103rtx
5104emit_use (rtx x)
5105{
5106  /* CONCATs should not appear in the insn stream.  */
5107  if (GET_CODE (x) == CONCAT)
5108    {
5109      emit_use (XEXP (x, 0));
5110      return emit_use (XEXP (x, 1));
5111    }
5112  return emit_insn (gen_rtx_USE (VOIDmode, x));
5113}
5114
5115/* Return a sequence of insns to use rvalue X.  */
5116
5117rtx
5118gen_use (rtx x)
5119{
5120  rtx seq;
5121
5122  start_sequence ();
5123  emit_use (x);
5124  seq = get_insns ();
5125  end_sequence ();
5126  return seq;
5127}
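
/* As a hedged example, a pass that must keep the value of a pseudo
   REG alive past optimizations that would otherwise delete its last
   set might emit

	emit_use (reg);

   or, when the use has to be spliced in elsewhere, generate it out
   of line with

	seq = gen_use (reg);
	emit_insn_before (seq, some_insn);

   where reg and some_insn are placeholders for the caller's rtl.  */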
5128
5129/* Cause the next statement to emit a line note even if the line number
5130   has not changed.  */
5131
5132void
5133force_next_line_note (void)
5134{
5135  last_location = -1;
5136}
5137
5138/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
5139   note of this type already exists, its datum is replaced with DATUM.  */
5140
5141rtx
5142set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5143{
5144  rtx note = find_reg_note (insn, kind, NULL_RTX);
5145
5146  switch (kind)
5147    {
5148    case REG_EQUAL:
5149    case REG_EQUIV:
5150      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5151	 has multiple sets (some callers assume single_set
5152	 means the insn only has one set, when in fact it
5153	 means the insn only has one *useful* set).  */
5154      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5155	{
5156	  gcc_assert (!note);
5157	  return NULL_RTX;
5158	}
5159
5160      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5161	 It serves no useful purpose and breaks eliminate_regs.  */
5162      if (GET_CODE (datum) == ASM_OPERANDS)
5163	return NULL_RTX;
5164
5165      if (note)
5166	{
5167	  XEXP (note, 0) = datum;
5168	  df_notes_rescan (insn);
5169	  return note;
5170	}
5171      break;
5172
5173    default:
5174      if (note)
5175	{
5176	  XEXP (note, 0) = datum;
5177	  return note;
5178	}
5179      break;
5180    }
5181
5182  add_reg_note (insn, kind, datum);
5183
5184  switch (kind)
5185    {
5186    case REG_EQUAL:
5187    case REG_EQUIV:
5188      df_notes_rescan (insn);
5189      break;
5190    default:
5191      break;
5192    }
5193
5194  return REG_NOTES (insn);
5195}
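
/* A small, hypothetical example: after emitting an insn INSN that is
   known to compute the constant 42 in some roundabout way, a pass
   could record that fact for later simplification with

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   Calling it again with a different datum replaces the earlier
   REG_EQUAL note rather than adding a second one.  */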
5196
5197/* Return an indication of which type of insn should have X as a body.
5198   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
5199
5200static enum rtx_code
5201classify_insn (rtx x)
5202{
5203  if (LABEL_P (x))
5204    return CODE_LABEL;
5205  if (GET_CODE (x) == CALL)
5206    return CALL_INSN;
5207  if (GET_CODE (x) == RETURN)
5208    return JUMP_INSN;
5209  if (GET_CODE (x) == SET)
5210    {
5211      if (SET_DEST (x) == pc_rtx)
5212	return JUMP_INSN;
5213      else if (GET_CODE (SET_SRC (x)) == CALL)
5214	return CALL_INSN;
5215      else
5216	return INSN;
5217    }
5218  if (GET_CODE (x) == PARALLEL)
5219    {
5220      int j;
5221      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5222	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5223	  return CALL_INSN;
5224	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5225		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5226	  return JUMP_INSN;
5227	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5228		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5229	  return CALL_INSN;
5230    }
5231  return INSN;
5232}
5233
5234/* Emit the rtl pattern X as an appropriate kind of insn.
5235   If X is a label, it is simply added into the insn chain.  */
5236
5237rtx
5238emit (rtx x)
5239{
5240  enum rtx_code code = classify_insn (x);
5241
5242  switch (code)
5243    {
5244    case CODE_LABEL:
5245      return emit_label (x);
5246    case INSN:
5247      return emit_insn (x);
5248    case JUMP_INSN:
5249      {
5250	rtx insn = emit_jump_insn (x);
5251	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5252	  return emit_barrier ();
5253	return insn;
5254      }
5255    case CALL_INSN:
5256      return emit_call_insn (x);
5257    case DEBUG_INSN:
5258      return emit_debug_insn (x);
5259    default:
5260      gcc_unreachable ();
5261    }
5262}
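
/* As a sketch (with hypothetical operands), the classification above
   means that

	emit (gen_rtx_SET (VOIDmode, pc_rtx,
			   gen_rtx_LABEL_REF (VOIDmode, label)));

   where label stands for an existing CODE_LABEL, is emitted as a
   JUMP_INSN and followed by a barrier because the jump is
   unconditional, whereas a SET whose destination is an ordinary
   register would have been emitted as a plain INSN.  */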
5263
5264/* Space for free sequence stack entries.  */
5265static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5266
5267/* Begin emitting insns to a sequence.  If this sequence will contain
5268   something that might cause the compiler to pop arguments to function
5269   calls (because those pops have previously been deferred; see
5270   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5271   before calling this function.  That will ensure that the deferred
5272   pops are not accidentally emitted in the middle of this sequence.  */
5273
5274void
5275start_sequence (void)
5276{
5277  struct sequence_stack *tem;
5278
5279  if (free_sequence_stack != NULL)
5280    {
5281      tem = free_sequence_stack;
5282      free_sequence_stack = tem->next;
5283    }
5284  else
5285    tem = GGC_NEW (struct sequence_stack);
5286
5287  tem->next = seq_stack;
5288  tem->first = first_insn;
5289  tem->last = last_insn;
5290
5291  seq_stack = tem;
5292
5293  first_insn = 0;
5294  last_insn = 0;
5295}
5296
5297/* Set up the insn chain starting with FIRST as the current sequence,
5298   saving the previously current one.  See the documentation for
5299   start_sequence for more information about how to use this function.  */
5300
5301void
5302push_to_sequence (rtx first)
5303{
5304  rtx last;
5305
5306  start_sequence ();
5307
5308  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5309
5310  first_insn = first;
5311  last_insn = last;
5312}
5313
5314/* Like push_to_sequence, but take the last insn as an argument to avoid
5315   looping through the list.  */
5316
5317void
5318push_to_sequence2 (rtx first, rtx last)
5319{
5320  start_sequence ();
5321
5322  first_insn = first;
5323  last_insn = last;
5324}
5325
5326/* Set up the outer-level insn chain
5327   as the current sequence, saving the previously current one.  */
5328
5329void
5330push_topmost_sequence (void)
5331{
5332  struct sequence_stack *stack, *top = NULL;
5333
5334  start_sequence ();
5335
5336  for (stack = seq_stack; stack; stack = stack->next)
5337    top = stack;
5338
5339  first_insn = top->first;
5340  last_insn = top->last;
5341}
5342
5343/* After emitting to the outer-level insn chain, update the outer-level
5344   insn chain and restore the previously saved state.  */
5345
5346void
5347pop_topmost_sequence (void)
5348{
5349  struct sequence_stack *stack, *top = NULL;
5350
5351  for (stack = seq_stack; stack; stack = stack->next)
5352    top = stack;
5353
5354  top->first = first_insn;
5355  top->last = last_insn;
5356
5357  end_sequence ();
5358}
5359
5360/* After emitting to a sequence, restore the previously saved state.
5361
5362   To get the contents of the sequence just made, you must call
5363   `get_insns' *before* calling here.
5364
5365   If the compiler might have deferred popping arguments while
5366   generating this sequence, and this sequence will not be immediately
5367   inserted into the instruction stream, use do_pending_stack_adjust
5368   before calling get_insns.  That will ensure that the deferred
5369   pops are inserted into this sequence, and not into some random
5370   location in the instruction stream.  See INHIBIT_DEFER_POP for more
5371   information about deferred popping of arguments.  */
5372
5373void
5374end_sequence (void)
5375{
5376  struct sequence_stack *tem = seq_stack;
5377
5378  first_insn = tem->first;
5379  last_insn = tem->last;
5380  seq_stack = tem->next;
5381
5382  memset (tem, 0, sizeof (*tem));
5383  tem->next = free_sequence_stack;
5384  free_sequence_stack = tem;
5385}
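
/* A hedged sketch of how the sequence stack nests: an inner sequence
   can be built while an outer one is still open, for example

	start_sequence ();
	emit_insn (pat1);
	start_sequence ();
	emit_insn (pat2);
	inner = get_insns ();
	end_sequence ();
	emit_insn (inner);
	outer = get_insns ();
	end_sequence ();

   where pat1 and pat2 are placeholders for real patterns; after the
   final end_sequence the previously current chain is live again and
   outer holds the insn for pat1 followed by the insn for pat2.  */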
5386
5387/* Return 1 if currently emitting into a sequence.  */
5388
5389int
5390in_sequence_p (void)
5391{
5392  return seq_stack != 0;
5393}
5394
5395/* Put the various virtual registers into REGNO_REG_RTX.  */
5396
5397static void
5398init_virtual_regs (void)
5399{
5400  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5401  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5402  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5403  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5404  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5405}
5406
5407
5408/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5409static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5410static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5411static int copy_insn_n_scratches;
5412
5413/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5414   copied an ASM_OPERANDS.
5415   In that case, it is the original input-operand vector.  */
5416static rtvec orig_asm_operands_vector;
5417
5418/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5419   copied an ASM_OPERANDS.
5420   In that case, it is the copied input-operand vector.  */
5421static rtvec copy_asm_operands_vector;
5422
5423/* Likewise for the constraints vector.  */
5424static rtvec orig_asm_constraints_vector;
5425static rtvec copy_asm_constraints_vector;
5426
5427/* Recursively create a new copy of an rtx for copy_insn.
5428   This function differs from copy_rtx in that it handles SCRATCHes and
5429   ASM_OPERANDs properly.
5430   Normally, this function is not used directly; use copy_insn as front end.
5431   However, you could first copy an insn pattern with copy_insn and then use
5432   this function afterwards to properly copy any REG_NOTEs containing
5433   SCRATCHes.  */
5434
5435rtx
5436copy_insn_1 (rtx orig)
5437{
5438  rtx copy;
5439  int i, j;
5440  RTX_CODE code;
5441  const char *format_ptr;
5442
5443  if (orig == NULL)
5444    return NULL;
5445
5446  code = GET_CODE (orig);
5447
5448  switch (code)
5449    {
5450    case REG:
5451    case CONST_INT:
5452    case CONST_DOUBLE:
5453    case CONST_FIXED:
5454    case CONST_VECTOR:
5455    case SYMBOL_REF:
5456    case CODE_LABEL:
5457    case PC:
5458    case CC0:
5459      return orig;
5460    case CLOBBER:
5461      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5462	return orig;
5463      break;
5464
5465    case SCRATCH:
5466      for (i = 0; i < copy_insn_n_scratches; i++)
5467	if (copy_insn_scratch_in[i] == orig)
5468	  return copy_insn_scratch_out[i];
5469      break;
5470
5471    case CONST:
5472      if (shared_const_p (orig))
5473	return orig;
5474      break;
5475
5476      /* A MEM with a constant address is not sharable.  The problem is that
5477	 the constant address may need to be reloaded.  If the mem is shared,
5478	 then reloading one copy of this mem will cause all copies to appear
5479	 to have been reloaded.  */
5480
5481    default:
5482      break;
5483    }
5484
5485  /* Copy the various flags, fields, and other information.  We assume
5486     that all fields need copying, and then clear the fields that should
5487     not be copied.  That is the sensible default behavior, and forces
5488     us to explicitly document why we are *not* copying a flag.  */
5489  copy = shallow_copy_rtx (orig);
5490
5491  /* We do not copy the USED flag, which is used as a mark bit during
5492     walks over the RTL.  */
5493  RTX_FLAG (copy, used) = 0;
5494
5495  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5496  if (INSN_P (orig))
5497    {
5498      RTX_FLAG (copy, jump) = 0;
5499      RTX_FLAG (copy, call) = 0;
5500      RTX_FLAG (copy, frame_related) = 0;
5501    }
5502
5503  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5504
5505  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5506    switch (*format_ptr++)
5507      {
5508      case 'e':
5509	if (XEXP (orig, i) != NULL)
5510	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5511	break;
5512
5513      case 'E':
5514      case 'V':
5515	if (XVEC (orig, i) == orig_asm_constraints_vector)
5516	  XVEC (copy, i) = copy_asm_constraints_vector;
5517	else if (XVEC (orig, i) == orig_asm_operands_vector)
5518	  XVEC (copy, i) = copy_asm_operands_vector;
5519	else if (XVEC (orig, i) != NULL)
5520	  {
5521	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5522	    for (j = 0; j < XVECLEN (copy, i); j++)
5523	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5524	  }
5525	break;
5526
5527      case 't':
5528      case 'w':
5529      case 'i':
5530      case 's':
5531      case 'S':
5532      case 'u':
5533      case '0':
5534	/* These are left unchanged.  */
5535	break;
5536
5537      default:
5538	gcc_unreachable ();
5539      }
5540
5541  if (code == SCRATCH)
5542    {
5543      i = copy_insn_n_scratches++;
5544      gcc_assert (i < MAX_RECOG_OPERANDS);
5545      copy_insn_scratch_in[i] = orig;
5546      copy_insn_scratch_out[i] = copy;
5547    }
5548  else if (code == ASM_OPERANDS)
5549    {
5550      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5551      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5552      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5553      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5554    }
5555
5556  return copy;
5557}
5558
5559/* Create a new copy of an rtx.
5560   This function differs from copy_rtx in that it handles SCRATCHes and
5561   ASM_OPERANDs properly.
5562   INSN doesn't really have to be a full INSN; it could be just the
5563   pattern.  */
5564rtx
5565copy_insn (rtx insn)
5566{
5567  copy_insn_n_scratches = 0;
5568  orig_asm_operands_vector = 0;
5569  orig_asm_constraints_vector = 0;
5570  copy_asm_operands_vector = 0;
5571  copy_asm_constraints_vector = 0;
5572  return copy_insn_1 (insn);
5573}
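
/* A minimal, hypothetical use: to duplicate an existing insn's
   pattern and emit the copy just before the original, one could write

	new_pat = copy_insn (PATTERN (insn));
	emit_insn_before (new_pat, insn);

   Using plain copy_rtx here would be wrong whenever the pattern
   contains SCRATCHes or ASM_OPERANDS, which is exactly what
   copy_insn is for.  */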
5574
5575/* Initialize data structures and variables in this file
5576   before generating rtl for each function.  */
5577
5578void
5579init_emit (void)
5580{
5581  first_insn = NULL;
5582  last_insn = NULL;
5583  if (MIN_NONDEBUG_INSN_UID)
5584    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5585  else
5586    cur_insn_uid = 1;
5587  cur_debug_insn_uid = 1;
5588  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5589  last_location = UNKNOWN_LOCATION;
5590  first_label_num = label_num;
5591  seq_stack = NULL;
5592
5593  /* Init the tables that describe all the pseudo regs.  */
5594
5595  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5596
5597  crtl->emit.regno_pointer_align
5598    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5599
5600  regno_reg_rtx
5601    = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);
5602
5603  /* Put copies of all the hard registers into regno_reg_rtx.  */
5604  memcpy (regno_reg_rtx,
5605	  static_regno_reg_rtx,
5606	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5607
5608  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5609  init_virtual_regs ();
5610
5611  /* Indicate that the virtual registers and stack locations are
5612     all pointers.  */
5613  REG_POINTER (stack_pointer_rtx) = 1;
5614  REG_POINTER (frame_pointer_rtx) = 1;
5615  REG_POINTER (hard_frame_pointer_rtx) = 1;
5616  REG_POINTER (arg_pointer_rtx) = 1;
5617
5618  REG_POINTER (virtual_incoming_args_rtx) = 1;
5619  REG_POINTER (virtual_stack_vars_rtx) = 1;
5620  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5621  REG_POINTER (virtual_outgoing_args_rtx) = 1;
5622  REG_POINTER (virtual_cfa_rtx) = 1;
5623
5624#ifdef STACK_BOUNDARY
5625  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5626  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5627  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5628  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5629
5630  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5631  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5632  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5633  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5634  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5635#endif
5636
5637#ifdef INIT_EXPANDERS
5638  INIT_EXPANDERS;
5639#endif
5640}
5641
5642/* Generate a vector constant for mode MODE and constant value CONSTANT.  */
5643
5644static rtx
5645gen_const_vector (enum machine_mode mode, int constant)
5646{
5647  rtx tem;
5648  rtvec v;
5649  int units, i;
5650  enum machine_mode inner;
5651
5652  units = GET_MODE_NUNITS (mode);
5653  inner = GET_MODE_INNER (mode);
5654
5655  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5656
5657  v = rtvec_alloc (units);
5658
5659  /* We need to call this function after we set the scalar const_tiny_rtx
5660     entries.  */
5661  gcc_assert (const_tiny_rtx[constant][(int) inner]);
5662
5663  for (i = 0; i < units; ++i)
5664    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5665
5666  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5667  return tem;
5668}
5669
5670/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5671   when all elements are zero, and the one vector when all elements are one.  */
5672rtx
5673gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5674{
5675  enum machine_mode inner = GET_MODE_INNER (mode);
5676  int nunits = GET_MODE_NUNITS (mode);
5677  rtx x;
5678  int i;
5679
5680  /* Check to see if all of the elements have the same value.  */
5681  x = RTVEC_ELT (v, nunits - 1);
5682  for (i = nunits - 2; i >= 0; i--)
5683    if (RTVEC_ELT (v, i) != x)
5684      break;
5685
5686  /* If the values are all the same, check to see if we can use one of the
5687     standard constant vectors.  */
5688  if (i == -1)
5689    {
5690      if (x == CONST0_RTX (inner))
5691	return CONST0_RTX (mode);
5692      else if (x == CONST1_RTX (inner))
5693	return CONST1_RTX (mode);
5694    }
5695
5696  return gen_rtx_raw_CONST_VECTOR (mode, v);
5697}
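
/* A short sketch (target-dependent; V4SImode is only an example mode
   that may not exist everywhere): building an all-zero vector this
   way hands back the shared constant instead of a fresh rtx

	v = rtvec_alloc (4);
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   and x will compare pointer-equal to CONST0_RTX (V4SImode).  */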
5698
5699/* Initialize global register information required by all functions.  */
5700
5701void
5702init_emit_regs (void)
5703{
5704  int i;
5705
5706  /* Reset register attributes.  */
5707  htab_empty (reg_attrs_htab);
5708
5709  /* We need reg_raw_mode, so initialize the modes now.  */
5710  init_reg_modes_target ();
5711
5712  /* Assign register numbers to the globally defined register rtx.  */
5713  pc_rtx = gen_rtx_PC (VOIDmode);
5714  cc0_rtx = gen_rtx_CC0 (VOIDmode);
5715  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5716  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5717  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5718  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5719  virtual_incoming_args_rtx =
5720    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5721  virtual_stack_vars_rtx =
5722    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5723  virtual_stack_dynamic_rtx =
5724    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5725  virtual_outgoing_args_rtx =
5726    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5727  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5728
5729  /* Initialize RTL for commonly used hard registers.  These are
5730     copied into regno_reg_rtx as we begin to compile each function.  */
5731  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5732    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5733
5734#ifdef RETURN_ADDRESS_POINTER_REGNUM
5735  return_address_pointer_rtx
5736    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5737#endif
5738
5739  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5740    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5741  else
5742    pic_offset_table_rtx = NULL_RTX;
5743}
5744
5745/* Create some permanent unique rtl objects shared between all functions.  */
5746
5747void
5748init_emit_once (void)
5749{
5750  int i;
5751  enum machine_mode mode;
5752  enum machine_mode double_mode;
5753
5754  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5755     hash tables.  */
5756  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5757				    const_int_htab_eq, NULL);
5758
5759  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5760				       const_double_htab_eq, NULL);
5761
5762  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5763				      const_fixed_htab_eq, NULL);
5764
5765  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5766				    mem_attrs_htab_eq, NULL);
5767  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5768				    reg_attrs_htab_eq, NULL);
5769
5770  /* Compute the byte, word, and double modes.  */
5771
5772  byte_mode = VOIDmode;
5773  word_mode = VOIDmode;
5774  double_mode = VOIDmode;
5775
5776  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5777       mode != VOIDmode;
5778       mode = GET_MODE_WIDER_MODE (mode))
5779    {
5780      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5781	  && byte_mode == VOIDmode)
5782	byte_mode = mode;
5783
5784      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5785	  && word_mode == VOIDmode)
5786	word_mode = mode;
5787    }
5788
5789  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5790       mode != VOIDmode;
5791       mode = GET_MODE_WIDER_MODE (mode))
5792    {
5793      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5794	  && double_mode == VOIDmode)
5795	double_mode = mode;
5796    }
5797
5798  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here, since it would try to look values up
     in the very cache we are initializing.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
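
  /* For illustration (with the usual MAX_SAVED_CONST_INT of 64): the
     CONST_INT for a value V with -64 <= V <= 64 sits at index V + 64, so

       const_int_rtx[0 + MAX_SAVED_CONST_INT]    is const0_rtx
       const_int_rtx[-1 + MAX_SAVED_CONST_INT]   is constm1_rtx

     and gen_rtx_CONST_INT can hand back these shared objects for small
     values instead of allocating new ones.  */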

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
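
  /* In the internal REAL_VALUE_TYPE format the significand lies in [0.5, 1),
     so 1.0 is stored as 0.5 * 2^1; decrementing the exponent as above yields
     0.5 * 2^0, i.e. exactly one half, with no rounding involved.  */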

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
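
  /* These tables back the CONST0_RTX, CONST1_RTX and CONST2_RTX macros in
     rtl.h, so e.g. CONST0_RTX (SFmode) expands to
     const_tiny_rtx[0][(int) SFmode], the (const_double:SF 0.0) built above,
     while CONST0_RTX (SImode) is simply const0_rtx.  */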

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* Store the value 1, i.e. the integer 1 shifted left by the number
         of fractional bits of the mode.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }
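
  /* As a concrete example: in an accumulator mode with 15 fractional bits,
     the shift above encodes 1.0 as the bit pattern 1 << 15 (0x8000), the
     usual fixed-point representation of one.  */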

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* Store the value 1, i.e. the integer 1 shifted left by the number
         of fractional bits of the mode.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx
  hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
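
/* Return a CLOBBER expression for hard register REGNO in MODE, reusing a
   cached copy if one has already been created.  */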
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
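
/* For example, a backend expander that must record that an instruction
   clobbers a particular hard register (here SET and FLAGS_REG are stand-ins
   for whatever pattern and register number the target uses) might write

     emit_insn (gen_rtx_PARALLEL (VOIDmode,
                                  gen_rtvec (2, set,
                                             gen_hard_reg_clobber (CCmode,
                                                                   FLAGS_REG))));

   and get the same shared CLOBBER rtx back on every call rather than
   allocating a fresh one each time.  */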

#include "gt-emit-rtl.h"
