1/* Reload pseudo regs into hard regs for insns that require hard regs.
2   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
4   Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27
28#include "machmode.h"
29#include "hard-reg-set.h"
30#include "rtl.h"
31#include "tm_p.h"
32#include "obstack.h"
33#include "insn-config.h"
34#include "flags.h"
35#include "function.h"
36#include "expr.h"
37#include "optabs.h"
38#include "regs.h"
39#include "basic-block.h"
40#include "reload.h"
41#include "recog.h"
42#include "output.h"
43#include "real.h"
44#include "toplev.h"
45#include "except.h"
46#include "tree.h"
47
48/* This file contains the reload pass of the compiler, which is
49   run after register allocation has been done.  It checks that
50   each insn is valid (operands required to be in registers really
51   are in registers of the proper class) and fixes up invalid ones
52   by copying values temporarily into registers for the insns
53   that need them.
54
55   The results of register allocation are described by the vector
56   reg_renumber; the insns still contain pseudo regs, but reg_renumber
57   can be used to find which hard reg, if any, a pseudo reg is in.
58
59   The technique we always use is to free up a few hard regs that are
60   called ``reload regs'', and for each place where a pseudo reg
61   must be in a hard reg, copy it temporarily into one of the reload regs.
62
63   Reload regs are allocated locally for every instruction that needs
64   reloads.  When there are pseudos which are allocated to a register that
65   has been chosen as a reload reg, such pseudos must be ``spilled''.
66   This means that they go to other hard regs, or to stack slots if no other
67   available hard regs can be found.  Spilling can invalidate more
68   insns, requiring additional need for reloads, so we must keep checking
69   until the process stabilizes.
70
71   For machines with different classes of registers, we must keep track
72   of the register class needed for each reload, and make sure that
73   we allocate enough reload registers of each class.
74
75   The file reload.c contains the code that checks one insn for
76   validity and reports the reloads that it needs.  This file
77   is in charge of scanning the entire rtl code, accumulating the
78   reload needs, spilling, assigning reload registers to use for
79   fixing up each insn, and generating the new insns to copy values
80   into the reload registers.  */
81
82/* During reload_as_needed, element N contains a REG rtx for the hard reg
83   into which reg N has been reloaded (perhaps for a previous insn).  */
84static rtx *reg_last_reload_reg;
85
86/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
87   for an output reload that stores into reg N.  */
88static char *reg_has_output_reload;
89
90/* Indicates which hard regs are reload-registers for an output reload
91   in the current insn.  */
92static HARD_REG_SET reg_is_output_reload;
93
94/* Element N is the constant value to which pseudo reg N is equivalent,
95   or zero if pseudo reg N is not equivalent to a constant.
96   find_reloads looks at this in order to replace pseudo reg N
97   with the constant it stands for.  */
98rtx *reg_equiv_constant;
99
100/* Element N is an invariant value to which pseudo reg N is equivalent.
101   eliminate_regs_in_insn uses this to replace pseudos in particular
102   contexts.  */
103rtx *reg_equiv_invariant;
104
105/* Element N is a memory location to which pseudo reg N is equivalent,
106   prior to any register elimination (such as frame pointer to stack
107   pointer).  Depending on whether or not it is a valid address, this value
108   is transferred to either reg_equiv_address or reg_equiv_mem.  */
109rtx *reg_equiv_memory_loc;
110
111/* We allocate reg_equiv_memory_loc inside a varray so that the garbage
112   collector can keep track of what is inside.  */
113varray_type reg_equiv_memory_loc_varray;
114
115/* Element N is the address of stack slot to which pseudo reg N is equivalent.
116   This is used when the address is not valid as a memory address
117   (because its displacement is too big for the machine.)  */
118rtx *reg_equiv_address;
119
120/* Element N is the memory slot to which pseudo reg N is equivalent,
121   or zero if pseudo reg N is not equivalent to a memory slot.  */
122rtx *reg_equiv_mem;
123
124/* Widest width in which each pseudo reg is referred to (via subreg).  */
125static unsigned int *reg_max_ref_width;
126
127/* Element N is the list of insns that initialized reg N from its equivalent
128   constant or memory slot.  */
129rtx *reg_equiv_init;
130int reg_equiv_init_size;
131
132/* Vector to remember old contents of reg_renumber before spilling.  */
133static short *reg_old_renumber;
134
135/* During reload_as_needed, element N contains the last pseudo regno reloaded
136   into hard register N.  If that pseudo reg occupied more than one register,
137   reg_reloaded_contents points to that pseudo for each spill register in
138   use; all of these must remain set for an inheritance to occur.  */
139static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
140
141/* During reload_as_needed, element N contains the insn for which
142   hard register N was last used.   Its contents are significant only
143   when reg_reloaded_valid is set for this register.  */
144static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
145
146/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
147static HARD_REG_SET reg_reloaded_valid;
148/* Indicate if the register was dead at the end of the reload.
149   This is only valid if reg_reloaded_contents is set and valid.  */
150static HARD_REG_SET reg_reloaded_dead;
151
152/* Indicate whether the register's current value is one that is not
153   safe to retain across a call, even for registers that are normally
154   call-saved.  */
155static HARD_REG_SET reg_reloaded_call_part_clobbered;
156
157/* Number of spill-regs so far; number of valid elements of spill_regs.  */
158static int n_spills;
159
160/* In parallel with spill_regs, contains REG rtx's for those regs.
161   Holds the last rtx used for any given reg, or 0 if it has never
162   been used for spilling yet.  This rtx is reused, provided it has
163   the proper mode.  */
164static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
165
166/* In parallel with spill_regs, contains nonzero for a spill reg
167   that was stored after the last time it was used.
168   The precise value is the insn generated to do the store.  */
169static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
170
171/* This is the register that was stored with spill_reg_store.  This is a
172   copy of reload_out / reload_out_reg when the value was stored; if
173   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
174static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
175
176/* This table is the inverse mapping of spill_regs:
177   indexed by hard reg number,
178   it contains the position of that reg in spill_regs,
179   or -1 for something that is not in spill_regs.
180
181   ?!?  This is no longer accurate.  */
182static short spill_reg_order[FIRST_PSEUDO_REGISTER];
183
184/* This reg set indicates registers that can't be used as spill registers for
185   the currently processed insn.  These are the hard registers which are live
186   during the insn, but not allocated to pseudos, as well as fixed
187   registers.  */
188static HARD_REG_SET bad_spill_regs;
189
190/* These are the hard registers that can't be used as spill register for any
191   insn.  This includes registers used for user variables and registers that
192   we can't eliminate.  A register that appears in this set also can't be used
193   to retry register allocation.  */
194static HARD_REG_SET bad_spill_regs_global;
195
196/* Describes order of use of registers for reloading
197   of spilled pseudo-registers.  `n_spills' is the number of
198   elements that are actually valid; new ones are added at the end.
199
200   Both spill_regs and spill_reg_order are used on two occasions:
201   once during find_reload_regs, where they keep track of the spill registers
202   for a single insn, but also during reload_as_needed where they show all
203   the registers ever used by reload.  For the latter case, the information
204   is calculated during finish_spills.  */
205static short spill_regs[FIRST_PSEUDO_REGISTER];
206
207/* This vector of reg sets indicates, for each pseudo, which hard registers
208   may not be used for retrying global allocation because the register was
209   formerly spilled from one of them.  If we allowed reallocating a pseudo to
210   a register that it was already allocated to, reload might not
211   terminate.  */
212static HARD_REG_SET *pseudo_previous_regs;
213
214/* This vector of reg sets indicates, for each pseudo, which hard
215   registers may not be used for retrying global allocation because they
216   are used as spill registers during one of the insns in which the
217   pseudo is live.  */
218static HARD_REG_SET *pseudo_forbidden_regs;
219
220/* All hard regs that have been used as spill registers for any insn are
221   marked in this set.  */
222static HARD_REG_SET used_spill_regs;
223
224/* Index of last register assigned as a spill register.  We allocate in
225   a round-robin fashion.  */
226static int last_spill_reg;
227
228/* Nonzero if indirect addressing is supported on the machine; this means
229   that spilling (REG n) does not require reloading it into a register in
230   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
231   value indicates the level of indirect addressing supported, e.g., two
232   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
233   a hard register.  */
234static char spill_indirect_levels;
235
236/* Nonzero if indirect addressing is supported when the innermost MEM is
237   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
238   which these are valid is the same as spill_indirect_levels, above.  */
239char indirect_symref_ok;
240
241/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
242char double_reg_address_ok;
243
244/* Record the stack slot for each spilled hard register.  */
245static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
246
247/* Width allocated so far for that stack slot.  */
248static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
249
250/* Record which pseudos needed to be spilled.  */
251static regset_head spilled_pseudos;
252
253/* Used for communication between order_regs_for_reload and count_pseudo.
254   Used to avoid counting one pseudo twice.  */
255static regset_head pseudos_counted;
256
257/* First uid used by insns created by reload in this function.
258   Used in find_equiv_reg.  */
259int reload_first_uid;
260
261/* Flag set by local-alloc or global-alloc if anything is live in
262   a call-clobbered reg across calls.  */
263int caller_save_needed;
264
265/* Set to 1 while reload_as_needed is operating.
266   Required by some machines to handle any generated moves differently.  */
267int reload_in_progress = 0;
268
269/* These arrays record the insn_code of insns that may be needed to
270   perform input and output reloads of special objects.  They provide a
271   place to pass a scratch register.  */
272enum insn_code reload_in_optab[NUM_MACHINE_MODES];
273enum insn_code reload_out_optab[NUM_MACHINE_MODES];
274
275/* This obstack is used for allocation of rtl during register elimination.
276   The allocated storage can be freed once find_reloads has processed the
277   insn.  */
278static struct obstack reload_obstack;
279
280/* Points to the beginning of the reload_obstack.  All insn_chain structures
281   are allocated first.  */
282static char *reload_startobj;
283
284/* The point after all insn_chain structures.  Used to quickly deallocate
285   memory allocated in copy_reloads during calculate_needs_all_insns.  */
286static char *reload_firstobj;
287
288/* This points before all local rtl generated by register elimination.
289   Used to quickly free all memory after processing one insn.  */
290static char *reload_insn_firstobj;
291
292/* List of insn_chain instructions, one for every insn that reload needs to
293   examine.  */
294struct insn_chain *reload_insn_chain;
295
296/* List of all insns needing reloads.  */
297static struct insn_chain *insns_need_reload;
298
299/* This structure is used to record information about register eliminations.
300   Each array entry describes one possible way of eliminating a register
301   in favor of another.   If there is more than one way of eliminating a
302   particular register, the most preferred should be specified first.  */
303
304struct elim_table
305{
306  int from;			/* Register number to be eliminated.  */
307  int to;			/* Register number used as replacement.  */
308  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
309  int can_eliminate;		/* Nonzero if this elimination can be done.  */
310  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
311				   insns made by reload.  */
312  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
313  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
314  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
315  rtx from_rtx;			/* REG rtx for the register to be eliminated.
316				   We cannot simply compare the number since
317				   we might then spuriously replace a hard
318				   register corresponding to a pseudo
319				   assigned to the reg to be eliminated.  */
320  rtx to_rtx;			/* REG rtx for the replacement.  */
321};
322
323static struct elim_table *reg_eliminate = 0;
324
325/* This is an intermediate structure to initialize the table.  It has
326   exactly the members provided by ELIMINABLE_REGS.  */
327static const struct elim_table_1
328{
329  const int from;
330  const int to;
331} reg_eliminate_1[] =
332
333/* If a set of eliminable registers was specified, define the table from it.
334   Otherwise, default to the normal case of the frame pointer being
335   replaced by the stack pointer.  */
336
337#ifdef ELIMINABLE_REGS
338  ELIMINABLE_REGS;
339#else
340  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
341#endif
342
343#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
344
345/* Record the number of pending eliminations that have an offset not equal
346   to their initial offset.  If nonzero, we use a new copy of each
347   replacement result in any insns encountered.  */
348int num_not_at_initial_offset;
349
350/* Count the number of registers that we may be able to eliminate.  */
351static int num_eliminable;
352/* And the number of registers that are equivalent to a constant that
353   can be eliminated to frame_pointer / arg_pointer + constant.  */
354static int num_eliminable_invariants;
355
356/* For each label, we record the offset of each elimination.  If we reach
357   a label by more than one path and an offset differs, we cannot do the
358   elimination.  This information is indexed by the difference of the
359   number of the label and the first label number.  We can't offset the
360   pointer itself as this can cause problems on machines with segmented
361   memory.  The first table is an array of flags that records whether we
362   have yet encountered a label and the second table is an array of arrays,
363   one entry in the latter array for each elimination.  */
364
365static int first_label_num;
366static char *offsets_known_at;
367static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
368
369/* Number of labels in the current function.  */
370
371static int num_labels;
372
373static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
374static void maybe_fix_stack_asms (void);
375static void copy_reloads (struct insn_chain *);
376static void calculate_needs_all_insns (int);
377static int find_reg (struct insn_chain *, int);
378static void find_reload_regs (struct insn_chain *);
379static void select_reload_regs (void);
380static void delete_caller_save_insns (void);
381
382static void spill_failure (rtx, enum reg_class);
383static void count_spilled_pseudo (int, int, int);
384static void delete_dead_insn (rtx);
385static void alter_reg (int, int);
386static void set_label_offsets (rtx, rtx, int);
387static void check_eliminable_occurrences (rtx);
388static void elimination_effects (rtx, enum machine_mode);
389static int eliminate_regs_in_insn (rtx, int);
390static void update_eliminable_offsets (void);
391static void mark_not_eliminable (rtx, rtx, void *);
392static void set_initial_elim_offsets (void);
393static bool verify_initial_elim_offsets (void);
394static void set_initial_label_offsets (void);
395static void set_offsets_for_label (rtx);
396static void init_elim_table (void);
397static void update_eliminables (HARD_REG_SET *);
398static void spill_hard_reg (unsigned int, int);
399static int finish_spills (int);
400static void scan_paradoxical_subregs (rtx);
401static void count_pseudo (int);
402static void order_regs_for_reload (struct insn_chain *);
403static void reload_as_needed (int);
404static void forget_old_reloads_1 (rtx, rtx, void *);
405static int reload_reg_class_lower (const void *, const void *);
406static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
407				    enum machine_mode);
408static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
409				     enum machine_mode);
410static int reload_reg_free_p (unsigned int, int, enum reload_type);
411static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
412					rtx, rtx, int, int);
413static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
414			     rtx, rtx, int, int);
415static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
416static int allocate_reload_reg (struct insn_chain *, int, int);
417static int conflicts_with_override (rtx);
418static void failed_reload (rtx, int);
419static int set_reload_reg (int, int);
420static void choose_reload_regs_init (struct insn_chain *, rtx *);
421static void choose_reload_regs (struct insn_chain *);
422static void merge_assigned_reloads (rtx);
423static void emit_input_reload_insns (struct insn_chain *, struct reload *,
424				     rtx, int);
425static void emit_output_reload_insns (struct insn_chain *, struct reload *,
426				      int);
427static void do_input_reload (struct insn_chain *, struct reload *, int);
428static void do_output_reload (struct insn_chain *, struct reload *, int);
429static bool inherit_piecemeal_p (int, int);
430static void emit_reload_insns (struct insn_chain *);
431static void delete_output_reload (rtx, int, int);
432static void delete_address_reloads (rtx, rtx);
433static void delete_address_reloads_1 (rtx, rtx, rtx);
434static rtx inc_for_reload (rtx, rtx, rtx, int);
435#ifdef AUTO_INC_DEC
436static void add_auto_inc_notes (rtx, rtx);
437#endif
438static void copy_eh_notes (rtx, rtx);
439static int reloads_conflict (int, int);
440static rtx gen_reload (rtx, rtx, int, enum reload_type);
441static rtx emit_insn_if_valid_for_reload (rtx);
442
443/* Initialize the reload pass once per compilation.  */
444
445void
446init_reload (void)
447{
448  int i;
449
450  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
451     Set spill_indirect_levels to the number of levels such addressing is
452     permitted, zero if it is not permitted at all.  */
453
454  rtx tem
455    = gen_rtx_MEM (Pmode,
456		   gen_rtx_PLUS (Pmode,
457				 gen_rtx_REG (Pmode,
458					      LAST_VIRTUAL_REGISTER + 1),
459				 GEN_INT (4)));
460  spill_indirect_levels = 0;
461
462  while (memory_address_p (QImode, tem))
463    {
464      spill_indirect_levels++;
465      tem = gen_rtx_MEM (Pmode, tem);
466    }
467
468  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
469
470  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
471  indirect_symref_ok = memory_address_p (QImode, tem);
472
473  /* See if reg+reg is a valid (and offsettable) address.  */
474
475  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
476    {
477      tem = gen_rtx_PLUS (Pmode,
478			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
479			  gen_rtx_REG (Pmode, i));
480
481      /* This way, we make sure that reg+reg is an offsettable address.  */
482      tem = plus_constant (tem, 4);
483
484      if (memory_address_p (QImode, tem))
485	{
486	  double_reg_address_ok = 1;
487	  break;
488	}
489    }
490
491  /* Initialize obstack for our rtl allocation.  */
492  gcc_obstack_init (&reload_obstack);
493  reload_startobj = obstack_alloc (&reload_obstack, 0);
494
495  INIT_REG_SET (&spilled_pseudos);
496  INIT_REG_SET (&pseudos_counted);
497  VARRAY_RTX_INIT (reg_equiv_memory_loc_varray, 0, "reg_equiv_memory_loc");
498}
499
500/* List of insn chains that are currently unused.  */
501static struct insn_chain *unused_insn_chains = 0;
502
503/* Allocate an empty insn_chain structure.  */
504struct insn_chain *
505new_insn_chain (void)
506{
507  struct insn_chain *c;
508
509  if (unused_insn_chains == 0)
510    {
511      c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
512      INIT_REG_SET (&c->live_throughout);
513      INIT_REG_SET (&c->dead_or_set);
514    }
515  else
516    {
517      c = unused_insn_chains;
518      unused_insn_chains = c->next;
519    }
520  c->is_caller_save_insn = 0;
521  c->need_operand_change = 0;
522  c->need_reload = 0;
523  c->need_elim = 0;
524  return c;
525}
526
527/* Small utility function to set all regs in hard reg set TO which are
528   allocated to pseudos in regset FROM.  */
529
530void
531compute_use_by_pseudos (HARD_REG_SET *to, regset from)
532{
533  unsigned int regno;
534  reg_set_iterator rsi;
535
536  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
537    {
538      int r = reg_renumber[regno];
539      int nregs;
540
541      if (r < 0)
542	{
543	  /* reload_combine uses the information from
544	     BASIC_BLOCK->global_live_at_start, which might still
545	     contain registers that have not actually been allocated
546	     since they have an equivalence.  */
547	  gcc_assert (reload_completed);
548	}
549      else
550	{
551	  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
552	  while (nregs-- > 0)
553	    SET_HARD_REG_BIT (*to, r + nregs);
554	}
555    }
556}
557
558/* Replace all pseudos found in LOC with their corresponding
559   equivalences.  */
560
561static void
562replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
563{
564  rtx x = *loc;
565  enum rtx_code code;
566  const char *fmt;
567  int i, j;
568
569  if (! x)
570    return;
571
572  code = GET_CODE (x);
573  if (code == REG)
574    {
575      unsigned int regno = REGNO (x);
576
577      if (regno < FIRST_PSEUDO_REGISTER)
578	return;
579
580      x = eliminate_regs (x, mem_mode, usage);
581      if (x != *loc)
582	{
583	  *loc = x;
584	  replace_pseudos_in (loc, mem_mode, usage);
585	  return;
586	}
587
588      if (reg_equiv_constant[regno])
589	*loc = reg_equiv_constant[regno];
590      else if (reg_equiv_mem[regno])
591	*loc = reg_equiv_mem[regno];
592      else if (reg_equiv_address[regno])
593	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
594      else
595	{
596	  gcc_assert (!REG_P (regno_reg_rtx[regno])
597		      || REGNO (regno_reg_rtx[regno]) != regno);
598	  *loc = regno_reg_rtx[regno];
599	}
600
601      return;
602    }
603  else if (code == MEM)
604    {
605      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
606      return;
607    }
608
609  /* Process each of our operands recursively.  */
610  fmt = GET_RTX_FORMAT (code);
611  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
612    if (*fmt == 'e')
613      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
614    else if (*fmt == 'E')
615      for (j = 0; j < XVECLEN (x, i); j++)
616	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
617}
618
619
620/* Global variables used by reload and its subroutines.  */
621
622/* Set during calculate_needs if an insn needs register elimination.  */
623static int something_needs_elimination;
624/* Set during calculate_needs if an insn needs an operand changed.  */
625static int something_needs_operands_changed;
626
627/* Nonzero means we couldn't get enough spill regs.  */
628static int failure;
629
630/* Main entry point for the reload pass.
631
632   FIRST is the first insn of the function being compiled.
633
634   GLOBAL nonzero means we were called from global_alloc
635   and should attempt to reallocate any pseudoregs that we
636   displace from hard regs we will use for reloads.
637   If GLOBAL is zero, we do not have enough information to do that,
638   so any pseudo reg that is spilled must go to the stack.
639
640   Return value is nonzero if reload failed
641   and we must not do any more for this function.  */
642
643int
644reload (rtx first, int global)
645{
646  int i;
647  rtx insn;
648  struct elim_table *ep;
649  basic_block bb;
650
651  /* Make sure even insns with volatile mem refs are recognizable.  */
652  init_recog ();
653
654  failure = 0;
655
656  reload_firstobj = obstack_alloc (&reload_obstack, 0);
657
658  /* Make sure that the last insn in the chain
659     is not something that needs reloading.  */
660  emit_note (NOTE_INSN_DELETED);
661
662  /* Enable find_equiv_reg to distinguish insns made by reload.  */
663  reload_first_uid = get_max_uid ();
664
665#ifdef SECONDARY_MEMORY_NEEDED
666  /* Initialize the secondary memory table.  */
667  clear_secondary_mem ();
668#endif
669
670  /* We don't have a stack slot for any spill reg yet.  */
671  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
672  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
673
674  /* Initialize the save area information for caller-save, in case some
675     are needed.  */
676  init_save_areas ();
677
678  /* Compute which hard registers are now in use
679     as homes for pseudo registers.
680     This is done here rather than (eg) in global_alloc
681     because this point is reached even if not optimizing.  */
682  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
683    mark_home_live (i);
684
685  /* A function that receives a nonlocal goto must save all call-saved
686     registers.  */
687  if (current_function_has_nonlocal_label)
688    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
689      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
690	regs_ever_live[i] = 1;
691
692  /* Find all the pseudo registers that didn't get hard regs
693     but do have known equivalent constants or memory slots.
694     These include parameters (known equivalent to parameter slots)
695     and cse'd or loop-moved constant memory addresses.
696
697     Record constant equivalents in reg_equiv_constant
698     so they will be substituted by find_reloads.
699     Record memory equivalents in reg_mem_equiv so they can
700     be substituted eventually by altering the REG-rtx's.  */
701
702  reg_equiv_constant = xcalloc (max_regno, sizeof (rtx));
703  reg_equiv_invariant = xcalloc (max_regno, sizeof (rtx));
704  reg_equiv_mem = xcalloc (max_regno, sizeof (rtx));
705  reg_equiv_address = xcalloc (max_regno, sizeof (rtx));
706  reg_max_ref_width = xcalloc (max_regno, sizeof (int));
707  reg_old_renumber = xcalloc (max_regno, sizeof (short));
708  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
709  pseudo_forbidden_regs = xmalloc (max_regno * sizeof (HARD_REG_SET));
710  pseudo_previous_regs = xcalloc (max_regno, sizeof (HARD_REG_SET));
711
712  CLEAR_HARD_REG_SET (bad_spill_regs_global);
713
714  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
715     to.  Also find all paradoxical subregs and find largest such for
716     each pseudo.  */
717
718  num_eliminable_invariants = 0;
719  for (insn = first; insn; insn = NEXT_INSN (insn))
720    {
721      rtx set = single_set (insn);
722
723      /* We may introduce USEs that we want to remove at the end, so
724	 we'll mark them with QImode.  Make sure there are no
725	 previously-marked insns left by say regmove.  */
726      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
727	  && GET_MODE (insn) != VOIDmode)
728	PUT_MODE (insn, VOIDmode);
729
730      if (INSN_P (insn))
731	scan_paradoxical_subregs (PATTERN (insn));
732
733      if (set != 0 && REG_P (SET_DEST (set)))
734	{
735	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
736	  rtx x;
737
738	  if (! note)
739	    continue;
740
741	  i = REGNO (SET_DEST (set));
742	  x = XEXP (note, 0);
743
744	  if (i <= LAST_VIRTUAL_REGISTER)
745	    continue;
746
747	  if (! function_invariant_p (x)
748	      || ! flag_pic
749	      /* A function invariant is often CONSTANT_P but may
750		 include a register.  We promise to only pass
751		 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P.  */
752	      || (CONSTANT_P (x)
753		  && LEGITIMATE_PIC_OPERAND_P (x)))
754	    {
755	      /* It can happen that a REG_EQUIV note contains a MEM
756		 that is not a legitimate memory operand.  As later
757		 stages of reload assume that all addresses found
758		 in the reg_equiv_* arrays were originally legitimate,
759		 we ignore such REG_EQUIV notes.  */
760	      if (memory_operand (x, VOIDmode))
761		{
762		  /* Always unshare the equivalence, so we can
763		     substitute into this insn without touching the
764		       equivalence.  */
765		  reg_equiv_memory_loc[i] = copy_rtx (x);
766		}
767	      else if (function_invariant_p (x))
768		{
769		  if (GET_CODE (x) == PLUS)
770		    {
771		      /* This is PLUS of frame pointer and a constant,
772			 and might be shared.  Unshare it.  */
773		      reg_equiv_invariant[i] = copy_rtx (x);
774		      num_eliminable_invariants++;
775		    }
776		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
777		    {
778		      reg_equiv_invariant[i] = x;
779		      num_eliminable_invariants++;
780		    }
781		  else if (LEGITIMATE_CONSTANT_P (x))
782		    reg_equiv_constant[i] = x;
783		  else
784		    {
785		      reg_equiv_memory_loc[i]
786			= force_const_mem (GET_MODE (SET_DEST (set)), x);
787		      if (! reg_equiv_memory_loc[i])
788			reg_equiv_init[i] = NULL_RTX;
789		    }
790		}
791	      else
792		{
793		  reg_equiv_init[i] = NULL_RTX;
794		  continue;
795		}
796	    }
797	  else
798	    reg_equiv_init[i] = NULL_RTX;
799	}
800    }
801
802  if (dump_file)
803    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
804      if (reg_equiv_init[i])
805	{
806	  fprintf (dump_file, "init_insns for %u: ", i);
807	  print_inline_rtx (dump_file, reg_equiv_init[i], 20);
808	  fprintf (dump_file, "\n");
809	}
810
811  init_elim_table ();
812
813  first_label_num = get_first_label_num ();
814  num_labels = max_label_num () - first_label_num;
815
816  /* Allocate the tables used to store offset information at labels.  */
817  /* We used to use alloca here, but the size of what it would try to
818     allocate would occasionally cause it to exceed the stack limit and
819     cause a core dump.  */
820  offsets_known_at = xmalloc (num_labels);
821  offsets_at = xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
822
823  /* Alter each pseudo-reg rtx to contain its hard reg number.
824     Assign stack slots to the pseudos that lack hard regs or equivalents.
825     Do not touch virtual registers.  */
826
827  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
828    alter_reg (i, -1);
829
830  /* If we have some registers we think can be eliminated, scan all insns to
831     see if there is an insn that sets one of these registers to something
832     other than itself plus a constant.  If so, the register cannot be
833     eliminated.  Doing this scan here eliminates an extra pass through the
834     main reload loop in the most common case where register elimination
835     cannot be done.  */
836  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
837    if (INSN_P (insn))
838      note_stores (PATTERN (insn), mark_not_eliminable, NULL);
839
840  maybe_fix_stack_asms ();
841
842  insns_need_reload = 0;
843  something_needs_elimination = 0;
844
845  /* Initialize to -1, which means take the first spill register.  */
846  last_spill_reg = -1;
847
848  /* Spill any hard regs that we know we can't eliminate.  */
849  CLEAR_HARD_REG_SET (used_spill_regs);
850  /* There can be multiple ways to eliminate a register;
851     they should be listed adjacently.
852     Elimination for any register fails only if all possible ways fail.  */
853  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
854    {
855      int from = ep->from;
856      int can_eliminate = 0;
857      do
858	{
859          can_eliminate |= ep->can_eliminate;
860          ep++;
861	}
862      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
863      if (! can_eliminate)
864	spill_hard_reg (from, 1);
865    }
866
867#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
868  if (frame_pointer_needed)
869    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
870#endif
871  finish_spills (global);
872
873  /* From now on, we may need to generate moves differently.  We may also
874     allow modifications of insns which cause them to not be recognized.
875     Any such modifications will be cleaned up during reload itself.  */
876  reload_in_progress = 1;
877
878  /* This loop scans the entire function each go-round
879     and repeats until one repetition spills no additional hard regs.  */
880  for (;;)
881    {
882      int something_changed;
883      int did_spill;
884
885      HOST_WIDE_INT starting_frame_size;
886
887      /* Round size of stack frame to stack_alignment_needed.  This must be done
888	 here because the stack size may be a part of the offset computation
889	 for register elimination, and there might have been new stack slots
890	 created in the last iteration of this loop.  */
891      if (cfun->stack_alignment_needed)
892        assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
893
894      starting_frame_size = get_frame_size ();
895
896      set_initial_elim_offsets ();
897      set_initial_label_offsets ();
898
899      /* For each pseudo register that has an equivalent location defined,
900	 try to eliminate any eliminable registers (such as the frame pointer)
901	 assuming initial offsets for the replacement register, which
902	 is the normal case.
903
904	 If the resulting location is directly addressable, substitute
905	 the MEM we just got directly for the old REG.
906
907	 If it is not addressable but is a constant or the sum of a hard reg
908	 and constant, it is probably not addressable because the constant is
909	 out of range, in that case record the address; we will generate
910	 hairy code to compute the address in a register each time it is
911	 needed.  Similarly if it is a hard register, but one that is not
912	 valid as an address register.
913
914	 If the location is not addressable, but does not have one of the
915	 above forms, assign a stack slot.  We have to do this to avoid the
916	 potential of producing lots of reloads if, e.g., a location involves
917	 a pseudo that didn't get a hard register and has an equivalent memory
918	 location that also involves a pseudo that didn't get a hard register.
919
920	 Perhaps at some point we will improve reload_when_needed handling
921	 so this problem goes away.  But that's very hairy.  */
922
923      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
924	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
925	  {
926	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
927
928	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
929					 XEXP (x, 0)))
930	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
931	    else if (CONSTANT_P (XEXP (x, 0))
932		     || (REG_P (XEXP (x, 0))
933			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
934		     || (GET_CODE (XEXP (x, 0)) == PLUS
935			 && REG_P (XEXP (XEXP (x, 0), 0))
936			 && (REGNO (XEXP (XEXP (x, 0), 0))
937			     < FIRST_PSEUDO_REGISTER)
938			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
939	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
940	    else
941	      {
942		/* Make a new stack slot.  Then indicate that something
943		   changed so we go back and recompute offsets for
944		   eliminable registers because the allocation of memory
945		   below might change some offset.  reg_equiv_{mem,address}
946		   will be set up for this pseudo on the next pass around
947		   the loop.  */
948		reg_equiv_memory_loc[i] = 0;
949		reg_equiv_init[i] = 0;
950		alter_reg (i, -1);
951	      }
952	  }
953
954      if (caller_save_needed)
955	setup_save_areas ();
956
957      /* If we allocated another stack slot, redo elimination bookkeeping.  */
958      if (starting_frame_size != get_frame_size ())
959	continue;
960
961      if (caller_save_needed)
962	{
963	  save_call_clobbered_regs ();
964	  /* That might have allocated new insn_chain structures.  */
965	  reload_firstobj = obstack_alloc (&reload_obstack, 0);
966	}
967
968      calculate_needs_all_insns (global);
969
970      CLEAR_REG_SET (&spilled_pseudos);
971      did_spill = 0;
972
973      something_changed = 0;
974
975      /* If we allocated any new memory locations, make another pass
976	 since it might have changed elimination offsets.  */
977      if (starting_frame_size != get_frame_size ())
978	something_changed = 1;
979
980      /* Even if the frame size remained the same, we might still have
981	 changed elimination offsets, e.g. if find_reloads called
982	 force_const_mem requiring the back end to allocate a constant
983	 pool base register that needs to be saved on the stack.  */
984      else if (!verify_initial_elim_offsets ())
985	something_changed = 1;
986
987      {
988	HARD_REG_SET to_spill;
989	CLEAR_HARD_REG_SET (to_spill);
990	update_eliminables (&to_spill);
991	AND_COMPL_HARD_REG_SET(used_spill_regs, to_spill);
992
993	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
994	  if (TEST_HARD_REG_BIT (to_spill, i))
995	    {
996	      spill_hard_reg (i, 1);
997	      did_spill = 1;
998
999	      /* Regardless of the state of spills, if we previously had
1000		 a register that we thought we could eliminate, but now can
1001		 not eliminate, we must run another pass.
1002
1003		 Consider pseudos which have an entry in reg_equiv_* which
1004		 reference an eliminable register.  We must make another pass
1005		 to update reg_equiv_* so that we do not substitute in the
1006		 old value from when we thought the elimination could be
1007		 performed.  */
1008	      something_changed = 1;
1009	    }
1010      }
1011
1012      select_reload_regs ();
1013      if (failure)
1014	goto failed;
1015
1016      if (insns_need_reload != 0 || did_spill)
1017	something_changed |= finish_spills (global);
1018
1019      if (! something_changed)
1020	break;
1021
1022      if (caller_save_needed)
1023	delete_caller_save_insns ();
1024
1025      obstack_free (&reload_obstack, reload_firstobj);
1026    }
1027
1028  /* If global-alloc was run, notify it of any register eliminations we have
1029     done.  */
1030  if (global)
1031    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1032      if (ep->can_eliminate)
1033	mark_elimination (ep->from, ep->to);
1034
1035  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1036     If that insn didn't set the register (i.e., it copied the register to
1037     memory), just delete that insn instead of the equivalencing insn plus
1038     anything now dead.  If we call delete_dead_insn on that insn, we may
1039     delete the insn that actually sets the register if the register dies
1040     there and that is incorrect.  */
1041
1042  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1043    {
1044      if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1045	{
1046	  rtx list;
1047	  for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1048	    {
1049	      rtx equiv_insn = XEXP (list, 0);
1050
1051	      /* If we already deleted the insn or if it may trap, we can't
1052		 delete it.  The latter case shouldn't happen, but can
1053		 if an insn has a variable address, gets a REG_EH_REGION
1054		 note added to it, and then gets converted into a load
1055		 from a constant address.  */
1056	      if (NOTE_P (equiv_insn)
1057		  || can_throw_internal (equiv_insn))
1058		;
1059	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1060		delete_dead_insn (equiv_insn);
1061	      else
1062		SET_INSN_DELETED (equiv_insn);
1063	    }
1064	}
1065    }
1066
1067  /* Use the reload registers where necessary
1068     by generating move instructions to move the must-be-register
1069     values into or out of the reload registers.  */
1070
1071  if (insns_need_reload != 0 || something_needs_elimination
1072      || something_needs_operands_changed)
1073    {
1074      HOST_WIDE_INT old_frame_size = get_frame_size ();
1075
1076      reload_as_needed (global);
1077
1078      gcc_assert (old_frame_size == get_frame_size ());
1079
1080      gcc_assert (verify_initial_elim_offsets ());
1081    }
1082
1083  /* If we were able to eliminate the frame pointer, show that it is no
1084     longer live at the start of any basic block.  If it ls live by
1085     virtue of being in a pseudo, that pseudo will be marked live
1086     and hence the frame pointer will be known to be live via that
1087     pseudo.  */
1088
1089  if (! frame_pointer_needed)
1090    FOR_EACH_BB (bb)
1091      CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start,
1092			   HARD_FRAME_POINTER_REGNUM);
1093
1094  /* Come here (with failure set nonzero) if we can't get enough spill
1095     regs.  */
1096 failed:
1097
1098  CLEAR_REG_SET (&spilled_pseudos);
1099  reload_in_progress = 0;
1100
1101  /* Now eliminate all pseudo regs by modifying them into
1102     their equivalent memory references.
1103     The REG-rtx's for the pseudos are modified in place,
1104     so all insns that used to refer to them now refer to memory.
1105
1106     For a reg that has a reg_equiv_address, all those insns
1107     were changed by reloading so that no insns refer to it any longer;
1108     but the DECL_RTL of a variable decl may refer to it,
1109     and if so this causes the debugging info to mention the variable.  */
1110
1111  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1112    {
1113      rtx addr = 0;
1114
1115      if (reg_equiv_mem[i])
1116	addr = XEXP (reg_equiv_mem[i], 0);
1117
1118      if (reg_equiv_address[i])
1119	addr = reg_equiv_address[i];
1120
1121      if (addr)
1122	{
1123	  if (reg_renumber[i] < 0)
1124	    {
1125	      rtx reg = regno_reg_rtx[i];
1126
1127	      REG_USERVAR_P (reg) = 0;
1128	      PUT_CODE (reg, MEM);
1129	      XEXP (reg, 0) = addr;
1130	      if (reg_equiv_memory_loc[i])
1131		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1132	      else
1133		{
1134		  MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1135		  MEM_ATTRS (reg) = 0;
1136		}
1137	      MEM_NOTRAP_P (reg) = 1;
1138	    }
1139	  else if (reg_equiv_mem[i])
1140	    XEXP (reg_equiv_mem[i], 0) = addr;
1141	}
1142    }
1143
1144  /* We must set reload_completed now since the cleanup_subreg_operands call
1145     below will re-recognize each insn and reload may have generated insns
1146     which are only valid during and after reload.  */
1147  reload_completed = 1;
1148
1149  /* Make a pass over all the insns and delete all USEs which we inserted
1150     only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1151     notes.  Delete all CLOBBER insns, except those that refer to the return
1152     value and the special mem:BLK CLOBBERs added to prevent the scheduler
1153     from misarranging variable-array code, and simplify (subreg (reg))
1154     operands.  Also remove all REG_RETVAL and REG_LIBCALL notes since they
1155     are no longer useful or accurate.  Strip and regenerate REG_INC notes
1156     that may have been moved around.  */
1157
1158  for (insn = first; insn; insn = NEXT_INSN (insn))
1159    if (INSN_P (insn))
1160      {
1161	rtx *pnote;
1162
1163	if (CALL_P (insn))
1164	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1165			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1166
1167	if ((GET_CODE (PATTERN (insn)) == USE
1168	     /* We mark with QImode USEs introduced by reload itself.  */
1169	     && (GET_MODE (insn) == QImode
1170		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1171	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1172		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1173		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1174		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1175			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1176				!= stack_pointer_rtx))
1177		&& (!REG_P (XEXP (PATTERN (insn), 0))
1178		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1179	  {
1180	    delete_insn (insn);
1181	    continue;
1182	  }
1183
1184	/* Some CLOBBERs may survive until here and still reference unassigned
1185	   pseudos with const equivalent, which may in turn cause ICE in later
1186	   passes if the reference remains in place.  */
1187	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1188	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1189			      VOIDmode, PATTERN (insn));
1190
1191	/* Discard obvious no-ops, even without -O.  This optimization
1192	   is fast and doesn't interfere with debugging.  */
1193	if (NONJUMP_INSN_P (insn)
1194	    && GET_CODE (PATTERN (insn)) == SET
1195	    && REG_P (SET_SRC (PATTERN (insn)))
1196	    && REG_P (SET_DEST (PATTERN (insn)))
1197	    && (REGNO (SET_SRC (PATTERN (insn)))
1198		== REGNO (SET_DEST (PATTERN (insn)))))
1199	  {
1200	    delete_insn (insn);
1201	    continue;
1202	  }
1203
1204	pnote = &REG_NOTES (insn);
1205	while (*pnote != 0)
1206	  {
1207	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1208		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1209		|| REG_NOTE_KIND (*pnote) == REG_INC
1210		|| REG_NOTE_KIND (*pnote) == REG_RETVAL
1211		|| REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1212	      *pnote = XEXP (*pnote, 1);
1213	    else
1214	      pnote = &XEXP (*pnote, 1);
1215	  }
1216
1217#ifdef AUTO_INC_DEC
1218	add_auto_inc_notes (insn, PATTERN (insn));
1219#endif
1220
1221	/* And simplify (subreg (reg)) if it appears as an operand.  */
1222	cleanup_subreg_operands (insn);
1223      }
1224
1225  /* If we are doing stack checking, give a warning if this function's
1226     frame size is larger than we expect.  */
1227  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1228    {
1229      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1230      static int verbose_warned = 0;
1231
1232      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1233	if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1234	  size += UNITS_PER_WORD;
1235
1236      if (size > STACK_CHECK_MAX_FRAME_SIZE)
1237	{
1238	  warning (0, "frame size too large for reliable stack checking");
1239	  if (! verbose_warned)
1240	    {
1241	      warning (0, "try reducing the number of local variables");
1242	      verbose_warned = 1;
1243	    }
1244	}
1245    }
1246
1247  /* Indicate that we no longer have known memory locations or constants.  */
1248  if (reg_equiv_constant)
1249    free (reg_equiv_constant);
1250  if (reg_equiv_invariant)
1251    free (reg_equiv_invariant);
1252  reg_equiv_constant = 0;
1253  reg_equiv_invariant = 0;
1254  VARRAY_GROW (reg_equiv_memory_loc_varray, 0);
1255  reg_equiv_memory_loc = 0;
1256
1257  if (offsets_known_at)
1258    free (offsets_known_at);
1259  if (offsets_at)
1260    free (offsets_at);
1261
1262  free (reg_equiv_mem);
1263  reg_equiv_init = 0;
1264  free (reg_equiv_address);
1265  free (reg_max_ref_width);
1266  free (reg_old_renumber);
1267  free (pseudo_previous_regs);
1268  free (pseudo_forbidden_regs);
1269
1270  CLEAR_HARD_REG_SET (used_spill_regs);
1271  for (i = 0; i < n_spills; i++)
1272    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1273
1274  /* Free all the insn_chain structures at once.  */
1275  obstack_free (&reload_obstack, reload_startobj);
1276  unused_insn_chains = 0;
1277  fixup_abnormal_edges ();
1278
1279  /* Replacing pseudos with their memory equivalents might have
1280     created shared rtx.  Subsequent passes would get confused
1281     by this, so unshare everything here.  */
1282  unshare_all_rtl_again (first);
1283
1284#ifdef STACK_BOUNDARY
1285  /* init_emit has set the alignment of the hard frame pointer
1286     to STACK_BOUNDARY.  It is very likely no longer valid if
1287     the hard frame pointer was used for register allocation.  */
1288  if (!frame_pointer_needed)
1289    REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1290#endif
1291
1292  return failure;
1293}
1294
1295/* Yet another special case.  Unfortunately, reg-stack forces people to
1296   write incorrect clobbers in asm statements.  These clobbers must not
1297   cause the register to appear in bad_spill_regs, otherwise we'll call
1298   fatal_insn later.  We clear the corresponding regnos in the live
1299   register sets to avoid this.
1300   The whole thing is rather sick, I'm afraid.  */
1301
1302static void
1303maybe_fix_stack_asms (void)
1304{
1305#ifdef STACK_REGS
1306  const char *constraints[MAX_RECOG_OPERANDS];
1307  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1308  struct insn_chain *chain;
1309
1310  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1311    {
1312      int i, noperands;
1313      HARD_REG_SET clobbered, allowed;
1314      rtx pat;
1315
1316      if (! INSN_P (chain->insn)
1317	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1318	continue;
1319      pat = PATTERN (chain->insn);
1320      if (GET_CODE (pat) != PARALLEL)
1321	continue;
1322
1323      CLEAR_HARD_REG_SET (clobbered);
1324      CLEAR_HARD_REG_SET (allowed);
1325
1326      /* First, make a mask of all stack regs that are clobbered.  */
1327      for (i = 0; i < XVECLEN (pat, 0); i++)
1328	{
1329	  rtx t = XVECEXP (pat, 0, i);
1330	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1331	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1332	}
1333
1334      /* Get the operand values and constraints out of the insn.  */
1335      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1336			   constraints, operand_mode);
1337
1338      /* For every operand, see what registers are allowed.  */
1339      for (i = 0; i < noperands; i++)
1340	{
1341	  const char *p = constraints[i];
1342	  /* For every alternative, we compute the class of registers allowed
1343	     for reloading in CLS, and merge its contents into the reg set
1344	     ALLOWED.  */
1345	  int cls = (int) NO_REGS;
1346
1347	  for (;;)
1348	    {
1349	      char c = *p;
1350
1351	      if (c == '\0' || c == ',' || c == '#')
1352		{
1353		  /* End of one alternative - mark the regs in the current
1354		     class, and reset the class.  */
1355		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1356		  cls = NO_REGS;
1357		  p++;
1358		  if (c == '#')
1359		    do {
1360		      c = *p++;
1361		    } while (c != '\0' && c != ',');
1362		  if (c == '\0')
1363		    break;
1364		  continue;
1365		}
1366
1367	      switch (c)
1368		{
1369		case '=': case '+': case '*': case '%': case '?': case '!':
1370		case '0': case '1': case '2': case '3': case '4': case 'm':
1371		case '<': case '>': case 'V': case 'o': case '&': case 'E':
1372		case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1373		case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1374		case 'P':
1375		  break;
1376
1377		case 'p':
1378		  cls = (int) reg_class_subunion[cls]
1379		    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1380		  break;
1381
1382		case 'g':
1383		case 'r':
1384		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1385		  break;
1386
1387		default:
1388		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1389		    cls = (int) reg_class_subunion[cls]
1390		      [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1391		  else
1392		    cls = (int) reg_class_subunion[cls]
1393		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1394		}
1395	      p += CONSTRAINT_LEN (c, p);
1396	    }
1397	}
1398      /* Those of the registers which are clobbered, but allowed by the
1399	 constraints, must be usable as reload registers.  So clear them
1400	 out of the life information.  */
1401      AND_HARD_REG_SET (allowed, clobbered);
1402      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1403	if (TEST_HARD_REG_BIT (allowed, i))
1404	  {
1405	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1406	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1407	  }
1408    }
1409
1410#endif
1411}
1412
1413/* Copy the global variables n_reloads and rld into the corresponding elts
1414   of CHAIN.  */
1415static void
1416copy_reloads (struct insn_chain *chain)
1417{
1418  chain->n_reloads = n_reloads;
1419  chain->rld = obstack_alloc (&reload_obstack,
1420			      n_reloads * sizeof (struct reload));
1421  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1422  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1423}
1424
1425/* Walk the chain of insns, and determine for each whether it needs reloads
1426   and/or eliminations.  Build the corresponding insns_need_reload list, and
1427   set something_needs_elimination as appropriate.  */
1428static void
1429calculate_needs_all_insns (int global)
1430{
1431  struct insn_chain **pprev_reload = &insns_need_reload;
1432  struct insn_chain *chain, *next = 0;
1433
1434  something_needs_elimination = 0;
1435
1436  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
1437  for (chain = reload_insn_chain; chain != 0; chain = next)
1438    {
1439      rtx insn = chain->insn;
1440
1441      next = chain->next;
1442
1443      /* Clear out the shortcuts.  */
1444      chain->n_reloads = 0;
1445      chain->need_elim = 0;
1446      chain->need_reload = 0;
1447      chain->need_operand_change = 0;
1448
1449      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1450	 include REG_LABEL), we need to see what effects this has on the
1451	 known offsets at labels.  */
1452
1453      if (LABEL_P (insn) || JUMP_P (insn)
1454	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
1455	set_label_offsets (insn, insn, 0);
1456
1457      if (INSN_P (insn))
1458	{
1459	  rtx old_body = PATTERN (insn);
1460	  int old_code = INSN_CODE (insn);
1461	  rtx old_notes = REG_NOTES (insn);
1462	  int did_elimination = 0;
1463	  int operands_changed = 0;
1464	  rtx set = single_set (insn);
1465
1466	  /* Skip insns that only set an equivalence.  */
1467	  if (set && REG_P (SET_DEST (set))
1468	      && reg_renumber[REGNO (SET_DEST (set))] < 0
1469	      && (reg_equiv_constant[REGNO (SET_DEST (set))]
1470		  || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1471		      && reg_equiv_init[REGNO (SET_DEST (set))])
1472	    continue;
1473
1474	  /* If needed, eliminate any eliminable registers.  */
1475	  if (num_eliminable || num_eliminable_invariants)
1476	    did_elimination = eliminate_regs_in_insn (insn, 0);
1477
1478	  /* Analyze the instruction.  */
1479	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1480					   global, spill_reg_order);
1481
1482	  /* If a no-op set needs more than one reload, this is likely
1483	     to be something that needs input address reloads.  We
1484	     can't get rid of this cleanly later, and it is of no use
1485	     anyway, so discard it now.
1486	     We only do this when expensive_optimizations is enabled,
1487	     since this complements reload inheritance / output
1488	     reload deletion, and it can make debugging harder.  */
1489	  if (flag_expensive_optimizations && n_reloads > 1)
1490	    {
1491	      rtx set = single_set (insn);
1492	      if (set
1493		  && SET_SRC (set) == SET_DEST (set)
1494		  && REG_P (SET_SRC (set))
1495		  && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1496		{
1497		  delete_insn (insn);
1498		  /* Delete it from the reload chain.  */
1499		  if (chain->prev)
1500		    chain->prev->next = next;
1501		  else
1502		    reload_insn_chain = next;
1503		  if (next)
1504		    next->prev = chain->prev;
1505		  chain->next = unused_insn_chains;
1506		  unused_insn_chains = chain;
1507		  continue;
1508		}
1509	    }
1510	  if (num_eliminable)
1511	    update_eliminable_offsets ();
1512
1513	  /* Remember for later shortcuts which insns had any reloads or
1514	     register eliminations.  */
1515	  chain->need_elim = did_elimination;
1516	  chain->need_reload = n_reloads > 0;
1517	  chain->need_operand_change = operands_changed;
1518
1519	  /* Discard any register replacements done.  */
1520	  if (did_elimination)
1521	    {
1522	      obstack_free (&reload_obstack, reload_insn_firstobj);
1523	      PATTERN (insn) = old_body;
1524	      INSN_CODE (insn) = old_code;
1525	      REG_NOTES (insn) = old_notes;
1526	      something_needs_elimination = 1;
1527	    }
1528
1529	  something_needs_operands_changed |= operands_changed;
1530
1531	  if (n_reloads != 0)
1532	    {
1533	      copy_reloads (chain);
1534	      *pprev_reload = chain;
1535	      pprev_reload = &chain->next_need_reload;
1536	    }
1537	}
1538    }
1539  *pprev_reload = 0;
1540}
1541
1542/* Comparison function for qsort to decide which of two reloads
1543   should be handled first.  *P1 and *P2 are the reload numbers.  */
1544
1545static int
1546reload_reg_class_lower (const void *r1p, const void *r2p)
1547{
1548  int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1549  int t;
1550
1551  /* Consider required reloads before optional ones.  */
1552  t = rld[r1].optional - rld[r2].optional;
1553  if (t != 0)
1554    return t;
1555
1556  /* Count all solitary classes before non-solitary ones.  */
1557  t = ((reg_class_size[(int) rld[r2].class] == 1)
1558       - (reg_class_size[(int) rld[r1].class] == 1));
1559  if (t != 0)
1560    return t;
1561
1562  /* Aside from solitaires, consider all multi-reg groups first.  */
1563  t = rld[r2].nregs - rld[r1].nregs;
1564  if (t != 0)
1565    return t;
1566
1567  /* Consider reloads in order of increasing reg-class number.  */
1568  t = (int) rld[r1].class - (int) rld[r2].class;
1569  if (t != 0)
1570    return t;
1571
1572  /* If reloads are equally urgent, sort by reload number,
1573     so that the results of qsort leave nothing to chance.  */
1574  return r1 - r2;
1575}
1576
1577/* The cost of spilling each hard reg.  */
1578static int spill_cost[FIRST_PSEUDO_REGISTER];
1579
1580/* When spilling multiple hard registers, we use SPILL_COST for the first
1581   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1582   only the first hard reg for a multi-reg pseudo.  */
1583static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1584
1585/* Update the spill cost arrays, considering that pseudo REG is live.  */
1586
1587static void
1588count_pseudo (int reg)
1589{
1590  int freq = REG_FREQ (reg);
1591  int r = reg_renumber[reg];
1592  int nregs;
1593
1594  if (REGNO_REG_SET_P (&pseudos_counted, reg)
1595      || REGNO_REG_SET_P (&spilled_pseudos, reg))
1596    return;
1597
1598  SET_REGNO_REG_SET (&pseudos_counted, reg);
1599
1600  gcc_assert (r >= 0);
1601
1602  spill_add_cost[r] += freq;
1603
1604  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1605  while (nregs-- > 0)
1606    spill_cost[r + nregs] += freq;
1607}
1608
1609/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1610   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1611
1612static void
1613order_regs_for_reload (struct insn_chain *chain)
1614{
1615  unsigned i;
1616  HARD_REG_SET used_by_pseudos;
1617  HARD_REG_SET used_by_pseudos2;
1618  reg_set_iterator rsi;
1619
1620  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1621
1622  memset (spill_cost, 0, sizeof spill_cost);
1623  memset (spill_add_cost, 0, sizeof spill_add_cost);
1624
1625  /* Count number of uses of each hard reg by pseudo regs allocated to it
1626     and then order them by decreasing use.  First exclude hard registers
1627     that are live in or across this insn.  */
1628
1629  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1630  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1631  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1632  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1633
1634  /* Now find out which pseudos are allocated to it, and update
1635     hard_reg_n_uses.  */
1636  CLEAR_REG_SET (&pseudos_counted);
1637
1638  EXECUTE_IF_SET_IN_REG_SET
1639    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1640    {
1641      count_pseudo (i);
1642    }
1643  EXECUTE_IF_SET_IN_REG_SET
1644    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1645    {
1646      count_pseudo (i);
1647    }
1648  CLEAR_REG_SET (&pseudos_counted);
1649}
1650
1651/* Vector of reload-numbers showing the order in which the reloads should
1652   be processed.  */
1653static short reload_order[MAX_RELOADS];
1654
1655/* This is used to keep track of the spill regs used in one insn.  */
1656static HARD_REG_SET used_spill_regs_local;
1657
1658/* We decided to spill hard register SPILLED, which has a size of
1659   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1660   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1661   update SPILL_COST/SPILL_ADD_COST.  */
1662
1663static void
1664count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1665{
1666  int r = reg_renumber[reg];
1667  int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1668
1669  if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1670      || spilled + spilled_nregs <= r || r + nregs <= spilled)
1671    return;
1672
1673  SET_REGNO_REG_SET (&spilled_pseudos, reg);
1674
1675  spill_add_cost[r] -= REG_FREQ (reg);
1676  while (nregs-- > 0)
1677    spill_cost[r + nregs] -= REG_FREQ (reg);
1678}
1679
1680/* Find reload register to use for reload number ORDER.  */
1681
1682static int
1683find_reg (struct insn_chain *chain, int order)
1684{
1685  int rnum = reload_order[order];
1686  struct reload *rl = rld + rnum;
1687  int best_cost = INT_MAX;
1688  int best_reg = -1;
1689  unsigned int i, j;
1690  int k;
1691  HARD_REG_SET not_usable;
1692  HARD_REG_SET used_by_other_reload;
1693  reg_set_iterator rsi;
1694
1695  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1696  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1697  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1698
1699  CLEAR_HARD_REG_SET (used_by_other_reload);
1700  for (k = 0; k < order; k++)
1701    {
1702      int other = reload_order[k];
1703
1704      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1705	for (j = 0; j < rld[other].nregs; j++)
1706	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1707    }
1708
1709  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1710    {
1711      unsigned int regno = i;
1712
1713      if (! TEST_HARD_REG_BIT (not_usable, regno)
1714	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1715	  && HARD_REGNO_MODE_OK (regno, rl->mode))
1716	{
1717	  int this_cost = spill_cost[regno];
1718	  int ok = 1;
1719	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1720
1721	  for (j = 1; j < this_nregs; j++)
1722	    {
1723	      this_cost += spill_add_cost[regno + j];
1724	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1725		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1726		ok = 0;
1727	    }
1728	  if (! ok)
1729	    continue;
1730	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1731	    this_cost--;
1732	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1733	    this_cost--;
1734	  if (this_cost < best_cost
1735	      /* Among registers with equal cost, prefer caller-saved ones, or
1736		 use REG_ALLOC_ORDER if it is defined.  */
1737	      || (this_cost == best_cost
1738#ifdef REG_ALLOC_ORDER
1739		  && (inv_reg_alloc_order[regno]
1740		      < inv_reg_alloc_order[best_reg])
1741#else
1742		  && call_used_regs[regno]
1743		  && ! call_used_regs[best_reg]
1744#endif
1745		  ))
1746	    {
1747	      best_reg = regno;
1748	      best_cost = this_cost;
1749	    }
1750	}
1751    }
1752  if (best_reg == -1)
1753    return 0;
1754
1755  if (dump_file)
1756    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1757
1758  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1759  rl->regno = best_reg;
1760
1761  EXECUTE_IF_SET_IN_REG_SET
1762    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1763    {
1764      count_spilled_pseudo (best_reg, rl->nregs, j);
1765    }
1766
1767  EXECUTE_IF_SET_IN_REG_SET
1768    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1769    {
1770      count_spilled_pseudo (best_reg, rl->nregs, j);
1771    }
1772
1773  for (i = 0; i < rl->nregs; i++)
1774    {
1775      gcc_assert (spill_cost[best_reg + i] == 0);
1776      gcc_assert (spill_add_cost[best_reg + i] == 0);
1777      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1778    }
1779  return 1;
1780}
1781
1782/* Find more reload regs to satisfy the remaining need of an insn, which
1783   is given by CHAIN.
1784   Do it by ascending class number, since otherwise a reg
1785   might be spilled for a big class and might fail to count
1786   for a smaller class even though it belongs to that class.  */
1787
1788static void
1789find_reload_regs (struct insn_chain *chain)
1790{
1791  int i;
1792
1793  /* In order to be certain of getting the registers we need,
1794     we must sort the reloads into order of increasing register class.
1795     Then our grabbing of reload registers will parallel the process
1796     that provided the reload registers.  */
1797  for (i = 0; i < chain->n_reloads; i++)
1798    {
1799      /* Show whether this reload already has a hard reg.  */
1800      if (chain->rld[i].reg_rtx)
1801	{
1802	  int regno = REGNO (chain->rld[i].reg_rtx);
1803	  chain->rld[i].regno = regno;
1804	  chain->rld[i].nregs
1805	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1806	}
1807      else
1808	chain->rld[i].regno = -1;
1809      reload_order[i] = i;
1810    }
1811
1812  n_reloads = chain->n_reloads;
1813  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1814
1815  CLEAR_HARD_REG_SET (used_spill_regs_local);
1816
1817  if (dump_file)
1818    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1819
1820  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1821
1822  /* Compute the order of preference for hard registers to spill.  */
1823
1824  order_regs_for_reload (chain);
1825
1826  for (i = 0; i < n_reloads; i++)
1827    {
1828      int r = reload_order[i];
1829
1830      /* Ignore reloads that got marked inoperative.  */
1831      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1832	  && ! rld[r].optional
1833	  && rld[r].regno == -1)
1834	if (! find_reg (chain, i))
1835	  {
1836	    spill_failure (chain->insn, rld[r].class);
1837	    failure = 1;
1838	    return;
1839	  }
1840    }
1841
1842  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1843  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1844
1845  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1846}
1847
1848static void
1849select_reload_regs (void)
1850{
1851  struct insn_chain *chain;
1852
1853  /* Try to satisfy the needs for each insn.  */
1854  for (chain = insns_need_reload; chain != 0;
1855       chain = chain->next_need_reload)
1856    find_reload_regs (chain);
1857}
1858
1859/* Delete all insns that were inserted by emit_caller_save_insns during
1860   this iteration.  */
1861static void
1862delete_caller_save_insns (void)
1863{
1864  struct insn_chain *c = reload_insn_chain;
1865
1866  while (c != 0)
1867    {
1868      while (c != 0 && c->is_caller_save_insn)
1869	{
1870	  struct insn_chain *next = c->next;
1871	  rtx insn = c->insn;
1872
1873	  if (c == reload_insn_chain)
1874	    reload_insn_chain = next;
1875	  delete_insn (insn);
1876
1877	  if (next)
1878	    next->prev = c->prev;
1879	  if (c->prev)
1880	    c->prev->next = next;
1881	  c->next = unused_insn_chains;
1882	  unused_insn_chains = c;
1883	  c = next;
1884	}
1885      if (c != 0)
1886	c = c->next;
1887    }
1888}
1889
1890/* Handle the failure to find a register to spill.
1891   INSN should be one of the insns which needed this particular spill reg.  */
1892
1893static void
1894spill_failure (rtx insn, enum reg_class class)
1895{
1896  if (asm_noperands (PATTERN (insn)) >= 0)
1897    error_for_asm (insn, "can't find a register in class %qs while "
1898		   "reloading %<asm%>",
1899		   reg_class_names[class]);
1900  else
1901    {
1902      error ("unable to find a register to spill in class %qs",
1903	     reg_class_names[class]);
1904      fatal_insn ("this is the insn:", insn);
1905    }
1906}
1907
1908/* Delete an unneeded INSN and any previous insns who sole purpose is loading
1909   data that is dead in INSN.  */
1910
1911static void
1912delete_dead_insn (rtx insn)
1913{
1914  rtx prev = prev_real_insn (insn);
1915  rtx prev_dest;
1916
1917  /* If the previous insn sets a register that dies in our insn, delete it
1918     too.  */
1919  if (prev && GET_CODE (PATTERN (prev)) == SET
1920      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
1921      && reg_mentioned_p (prev_dest, PATTERN (insn))
1922      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1923      && ! side_effects_p (SET_SRC (PATTERN (prev))))
1924    delete_dead_insn (prev);
1925
1926  SET_INSN_DELETED (insn);
1927}
1928
1929/* Modify the home of pseudo-reg I.
1930   The new home is present in reg_renumber[I].
1931
1932   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1933   or it may be -1, meaning there is none or it is not relevant.
1934   This is used so that all pseudos spilled from a given hard reg
1935   can share one stack slot.  */
1936
1937static void
1938alter_reg (int i, int from_reg)
1939{
1940  /* When outputting an inline function, this can happen
1941     for a reg that isn't actually used.  */
1942  if (regno_reg_rtx[i] == 0)
1943    return;
1944
1945  /* If the reg got changed to a MEM at rtl-generation time,
1946     ignore it.  */
1947  if (!REG_P (regno_reg_rtx[i]))
1948    return;
1949
1950  /* Modify the reg-rtx to contain the new hard reg
1951     number or else to contain its pseudo reg number.  */
1952  REGNO (regno_reg_rtx[i])
1953    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1954
1955  /* If we have a pseudo that is needed but has no hard reg or equivalent,
1956     allocate a stack slot for it.  */
1957
1958  if (reg_renumber[i] < 0
1959      && REG_N_REFS (i) > 0
1960      && reg_equiv_constant[i] == 0
1961      && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
1962      && reg_equiv_memory_loc[i] == 0)
1963    {
1964      rtx x;
1965      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
1966      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
1967      int adjust = 0;
1968
1969      /* Each pseudo reg has an inherent size which comes from its own mode,
1970	 and a total size which provides room for paradoxical subregs
1971	 which refer to the pseudo reg in wider modes.
1972
1973	 We can use a slot already allocated if it provides both
1974	 enough inherent space and enough total space.
1975	 Otherwise, we allocate a new slot, making sure that it has no less
1976	 inherent space, and no less total space, then the previous slot.  */
1977      if (from_reg == -1)
1978	{
1979	  /* No known place to spill from => no slot to reuse.  */
1980	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
1981				  inherent_size == total_size ? 0 : -1);
1982	  if (BYTES_BIG_ENDIAN)
1983	    /* Cancel the  big-endian correction done in assign_stack_local.
1984	       Get the address of the beginning of the slot.
1985	       This is so we can do a big-endian correction unconditionally
1986	       below.  */
1987	    adjust = inherent_size - total_size;
1988
1989	  /* Nothing can alias this slot except this pseudo.  */
1990	  set_mem_alias_set (x, new_alias_set ());
1991	}
1992
1993      /* Reuse a stack slot if possible.  */
1994      else if (spill_stack_slot[from_reg] != 0
1995	       && spill_stack_slot_width[from_reg] >= total_size
1996	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
1997		   >= inherent_size))
1998	x = spill_stack_slot[from_reg];
1999
2000      /* Allocate a bigger slot.  */
2001      else
2002	{
2003	  /* Compute maximum size needed, both for inherent size
2004	     and for total size.  */
2005	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2006	  rtx stack_slot;
2007
2008	  if (spill_stack_slot[from_reg])
2009	    {
2010	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2011		  > inherent_size)
2012		mode = GET_MODE (spill_stack_slot[from_reg]);
2013	      if (spill_stack_slot_width[from_reg] > total_size)
2014		total_size = spill_stack_slot_width[from_reg];
2015	    }
2016
2017	  /* Make a slot with that size.  */
2018	  x = assign_stack_local (mode, total_size,
2019				  inherent_size == total_size ? 0 : -1);
2020	  stack_slot = x;
2021
2022	  /* All pseudos mapped to this slot can alias each other.  */
2023	  if (spill_stack_slot[from_reg])
2024	    set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2025	  else
2026	    set_mem_alias_set (x, new_alias_set ());
2027
2028	  if (BYTES_BIG_ENDIAN)
2029	    {
2030	      /* Cancel the  big-endian correction done in assign_stack_local.
2031		 Get the address of the beginning of the slot.
2032		 This is so we can do a big-endian correction unconditionally
2033		 below.  */
2034	      adjust = GET_MODE_SIZE (mode) - total_size;
2035	      if (adjust)
2036		stack_slot
2037		  = adjust_address_nv (x, mode_for_size (total_size
2038							 * BITS_PER_UNIT,
2039							 MODE_INT, 1),
2040				       adjust);
2041	    }
2042
2043	  spill_stack_slot[from_reg] = stack_slot;
2044	  spill_stack_slot_width[from_reg] = total_size;
2045	}
2046
2047      /* On a big endian machine, the "address" of the slot
2048	 is the address of the low part that fits its inherent mode.  */
2049      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2050	adjust += (total_size - inherent_size);
2051
2052      /* If we have any adjustment to make, or if the stack slot is the
2053	 wrong mode, make a new stack slot.  */
2054      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2055
2056      /* If we have a decl for the original register, set it for the
2057	 memory.  If this is a shared MEM, make a copy.  */
2058      if (REG_EXPR (regno_reg_rtx[i])
2059	  && DECL_P (REG_EXPR (regno_reg_rtx[i])))
2060	{
2061	  rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2062
2063	  /* We can do this only for the DECLs home pseudo, not for
2064	     any copies of it, since otherwise when the stack slot
2065	     is reused, nonoverlapping_memrefs_p might think they
2066	     cannot overlap.  */
2067	  if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
2068	    {
2069	      if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2070		x = copy_rtx (x);
2071
2072	      set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2073	    }
2074	}
2075
2076      /* Save the stack slot for later.  */
2077      reg_equiv_memory_loc[i] = x;
2078    }
2079}
2080
2081/* Mark the slots in regs_ever_live for the hard regs
2082   used by pseudo-reg number REGNO.  */
2083
2084void
2085mark_home_live (int regno)
2086{
2087  int i, lim;
2088
2089  i = reg_renumber[regno];
2090  if (i < 0)
2091    return;
2092  lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
2093  while (i < lim)
2094    regs_ever_live[i++] = 1;
2095}
2096
2097/* This function handles the tracking of elimination offsets around branches.
2098
2099   X is a piece of RTL being scanned.
2100
2101   INSN is the insn that it came from, if any.
2102
2103   INITIAL_P is nonzero if we are to set the offset to be the initial
2104   offset and zero if we are setting the offset of the label to be the
2105   current offset.  */
2106
2107static void
2108set_label_offsets (rtx x, rtx insn, int initial_p)
2109{
2110  enum rtx_code code = GET_CODE (x);
2111  rtx tem;
2112  unsigned int i;
2113  struct elim_table *p;
2114
2115  switch (code)
2116    {
2117    case LABEL_REF:
2118      if (LABEL_REF_NONLOCAL_P (x))
2119	return;
2120
2121      x = XEXP (x, 0);
2122
2123      /* ... fall through ...  */
2124
2125    case CODE_LABEL:
2126      /* If we know nothing about this label, set the desired offsets.  Note
2127	 that this sets the offset at a label to be the offset before a label
2128	 if we don't know anything about the label.  This is not correct for
2129	 the label after a BARRIER, but is the best guess we can make.  If
2130	 we guessed wrong, we will suppress an elimination that might have
2131	 been possible had we been able to guess correctly.  */
2132
2133      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2134	{
2135	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2136	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2137	      = (initial_p ? reg_eliminate[i].initial_offset
2138		 : reg_eliminate[i].offset);
2139	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2140	}
2141
2142      /* Otherwise, if this is the definition of a label and it is
2143	 preceded by a BARRIER, set our offsets to the known offset of
2144	 that label.  */
2145
2146      else if (x == insn
2147	       && (tem = prev_nonnote_insn (insn)) != 0
2148	       && BARRIER_P (tem))
2149	set_offsets_for_label (insn);
2150      else
2151	/* If neither of the above cases is true, compare each offset
2152	   with those previously recorded and suppress any eliminations
2153	   where the offsets disagree.  */
2154
2155	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2156	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2157	      != (initial_p ? reg_eliminate[i].initial_offset
2158		  : reg_eliminate[i].offset))
2159	    reg_eliminate[i].can_eliminate = 0;
2160
2161      return;
2162
2163    case JUMP_INSN:
2164      set_label_offsets (PATTERN (insn), insn, initial_p);
2165
2166      /* ... fall through ...  */
2167
2168    case INSN:
2169    case CALL_INSN:
2170      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2171	 and hence must have all eliminations at their initial offsets.  */
2172      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2173	if (REG_NOTE_KIND (tem) == REG_LABEL)
2174	  set_label_offsets (XEXP (tem, 0), insn, 1);
2175      return;
2176
2177    case PARALLEL:
2178    case ADDR_VEC:
2179    case ADDR_DIFF_VEC:
2180      /* Each of the labels in the parallel or address vector must be
2181	 at their initial offsets.  We want the first field for PARALLEL
2182	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2183
2184      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2185	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2186			   insn, initial_p);
2187      return;
2188
2189    case SET:
2190      /* We only care about setting PC.  If the source is not RETURN,
2191	 IF_THEN_ELSE, or a label, disable any eliminations not at
2192	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2193	 isn't one of those possibilities.  For branches to a label,
2194	 call ourselves recursively.
2195
2196	 Note that this can disable elimination unnecessarily when we have
2197	 a non-local goto since it will look like a non-constant jump to
2198	 someplace in the current function.  This isn't a significant
2199	 problem since such jumps will normally be when all elimination
2200	 pairs are back to their initial offsets.  */
2201
2202      if (SET_DEST (x) != pc_rtx)
2203	return;
2204
2205      switch (GET_CODE (SET_SRC (x)))
2206	{
2207	case PC:
2208	case RETURN:
2209	  return;
2210
2211	case LABEL_REF:
2212	  set_label_offsets (SET_SRC (x), insn, initial_p);
2213	  return;
2214
2215	case IF_THEN_ELSE:
2216	  tem = XEXP (SET_SRC (x), 1);
2217	  if (GET_CODE (tem) == LABEL_REF)
2218	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2219	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2220	    break;
2221
2222	  tem = XEXP (SET_SRC (x), 2);
2223	  if (GET_CODE (tem) == LABEL_REF)
2224	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2225	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2226	    break;
2227	  return;
2228
2229	default:
2230	  break;
2231	}
2232
2233      /* If we reach here, all eliminations must be at their initial
2234	 offset because we are doing a jump to a variable address.  */
2235      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2236	if (p->offset != p->initial_offset)
2237	  p->can_eliminate = 0;
2238      break;
2239
2240    default:
2241      break;
2242    }
2243}
2244
2245/* Scan X and replace any eliminable registers (such as fp) with a
2246   replacement (such as sp), plus an offset.
2247
2248   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2249   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2250   MEM, we are allowed to replace a sum of a register and the constant zero
2251   with the register, which we cannot do outside a MEM.  In addition, we need
2252   to record the fact that a register is referenced outside a MEM.
2253
2254   If INSN is an insn, it is the insn containing X.  If we replace a REG
2255   in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2256   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2257   the REG is being modified.
2258
2259   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2260   That's used when we eliminate in expressions stored in notes.
2261   This means, do not set ref_outside_mem even if the reference
2262   is outside of MEMs.
2263
2264   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2265   replacements done assuming all offsets are at their initial values.  If
2266   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2267   encounter, return the actual location so that find_reloads will do
2268   the proper thing.  */
2269
2270static rtx
2271eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2272		  bool may_use_invariant)
2273{
2274  enum rtx_code code = GET_CODE (x);
2275  struct elim_table *ep;
2276  int regno;
2277  rtx new;
2278  int i, j;
2279  const char *fmt;
2280  int copied = 0;
2281
2282  if (! current_function_decl)
2283    return x;
2284
2285  switch (code)
2286    {
2287    case CONST_INT:
2288    case CONST_DOUBLE:
2289    case CONST_VECTOR:
2290    case CONST:
2291    case SYMBOL_REF:
2292    case CODE_LABEL:
2293    case PC:
2294    case CC0:
2295    case ASM_INPUT:
2296    case ADDR_VEC:
2297    case ADDR_DIFF_VEC:
2298    case RETURN:
2299      return x;
2300
2301    case REG:
2302      regno = REGNO (x);
2303
2304      /* First handle the case where we encounter a bare register that
2305	 is eliminable.  Replace it with a PLUS.  */
2306      if (regno < FIRST_PSEUDO_REGISTER)
2307	{
2308	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2309	       ep++)
2310	    if (ep->from_rtx == x && ep->can_eliminate)
2311	      return plus_constant (ep->to_rtx, ep->previous_offset);
2312
2313	}
2314      else if (reg_renumber && reg_renumber[regno] < 0
2315	       && reg_equiv_invariant && reg_equiv_invariant[regno])
2316	{
2317	  if (may_use_invariant)
2318	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2319			             mem_mode, insn, true);
2320	  /* There exists at least one use of REGNO that cannot be
2321	     eliminated.  Prevent the defining insn from being deleted.  */
2322	  reg_equiv_init[regno] = NULL_RTX;
2323	  alter_reg (regno, -1);
2324	}
2325      return x;
2326
2327    /* You might think handling MINUS in a manner similar to PLUS is a
2328       good idea.  It is not.  It has been tried multiple times and every
2329       time the change has had to have been reverted.
2330
2331       Other parts of reload know a PLUS is special (gen_reload for example)
2332       and require special code to handle code a reloaded PLUS operand.
2333
2334       Also consider backends where the flags register is clobbered by a
2335       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2336       lea instruction comes to mind).  If we try to reload a MINUS, we
2337       may kill the flags register that was holding a useful value.
2338
2339       So, please before trying to handle MINUS, consider reload as a
2340       whole instead of this little section as well as the backend issues.  */
2341    case PLUS:
2342      /* If this is the sum of an eliminable register and a constant, rework
2343	 the sum.  */
2344      if (REG_P (XEXP (x, 0))
2345	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2346	  && CONSTANT_P (XEXP (x, 1)))
2347	{
2348	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2349	       ep++)
2350	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2351	      {
2352		/* The only time we want to replace a PLUS with a REG (this
2353		   occurs when the constant operand of the PLUS is the negative
2354		   of the offset) is when we are inside a MEM.  We won't want
2355		   to do so at other times because that would change the
2356		   structure of the insn in a way that reload can't handle.
2357		   We special-case the commonest situation in
2358		   eliminate_regs_in_insn, so just replace a PLUS with a
2359		   PLUS here, unless inside a MEM.  */
2360		if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2361		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2362		  return ep->to_rtx;
2363		else
2364		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2365				       plus_constant (XEXP (x, 1),
2366						      ep->previous_offset));
2367	      }
2368
2369	  /* If the register is not eliminable, we are done since the other
2370	     operand is a constant.  */
2371	  return x;
2372	}
2373
2374      /* If this is part of an address, we want to bring any constant to the
2375	 outermost PLUS.  We will do this by doing register replacement in
2376	 our operands and seeing if a constant shows up in one of them.
2377
2378	 Note that there is no risk of modifying the structure of the insn,
2379	 since we only get called for its operands, thus we are either
2380	 modifying the address inside a MEM, or something like an address
2381	 operand of a load-address insn.  */
2382
2383      {
2384	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2385	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2386
2387	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2388	  {
2389	    /* If one side is a PLUS and the other side is a pseudo that
2390	       didn't get a hard register but has a reg_equiv_constant,
2391	       we must replace the constant here since it may no longer
2392	       be in the position of any operand.  */
2393	    if (GET_CODE (new0) == PLUS && REG_P (new1)
2394		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2395		&& reg_renumber[REGNO (new1)] < 0
2396		&& reg_equiv_constant != 0
2397		&& reg_equiv_constant[REGNO (new1)] != 0)
2398	      new1 = reg_equiv_constant[REGNO (new1)];
2399	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
2400		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2401		     && reg_renumber[REGNO (new0)] < 0
2402		     && reg_equiv_constant[REGNO (new0)] != 0)
2403	      new0 = reg_equiv_constant[REGNO (new0)];
2404
2405	    new = form_sum (new0, new1);
2406
2407	    /* As above, if we are not inside a MEM we do not want to
2408	       turn a PLUS into something else.  We might try to do so here
2409	       for an addition of 0 if we aren't optimizing.  */
2410	    if (! mem_mode && GET_CODE (new) != PLUS)
2411	      return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2412	    else
2413	      return new;
2414	  }
2415      }
2416      return x;
2417
2418    case MULT:
2419      /* If this is the product of an eliminable register and a
2420	 constant, apply the distribute law and move the constant out
2421	 so that we have (plus (mult ..) ..).  This is needed in order
2422	 to keep load-address insns valid.   This case is pathological.
2423	 We ignore the possibility of overflow here.  */
2424      if (REG_P (XEXP (x, 0))
2425	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2426	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
2427	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2428	     ep++)
2429	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2430	    {
2431	      if (! mem_mode
2432		  /* Refs inside notes don't count for this purpose.  */
2433		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2434				      || GET_CODE (insn) == INSN_LIST)))
2435		ep->ref_outside_mem = 1;
2436
2437	      return
2438		plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2439			       ep->previous_offset * INTVAL (XEXP (x, 1)));
2440	    }
2441
2442      /* ... fall through ...  */
2443
2444    case CALL:
2445    case COMPARE:
2446    /* See comments before PLUS about handling MINUS.  */
2447    case MINUS:
2448    case DIV:      case UDIV:
2449    case MOD:      case UMOD:
2450    case AND:      case IOR:      case XOR:
2451    case ROTATERT: case ROTATE:
2452    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2453    case NE:       case EQ:
2454    case GE:       case GT:       case GEU:    case GTU:
2455    case LE:       case LT:       case LEU:    case LTU:
2456      {
2457	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2458	rtx new1 = XEXP (x, 1)
2459		   ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2460
2461	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2462	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2463      }
2464      return x;
2465
2466    case EXPR_LIST:
2467      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2468      if (XEXP (x, 0))
2469	{
2470	  new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2471	  if (new != XEXP (x, 0))
2472	    {
2473	      /* If this is a REG_DEAD note, it is not valid anymore.
2474		 Using the eliminated version could result in creating a
2475		 REG_DEAD note for the stack or frame pointer.  */
2476	      if (GET_MODE (x) == REG_DEAD)
2477		return (XEXP (x, 1)
2478			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2479			: NULL_RTX);
2480
2481	      x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2482	    }
2483	}
2484
2485      /* ... fall through ...  */
2486
2487    case INSN_LIST:
2488      /* Now do eliminations in the rest of the chain.  If this was
2489	 an EXPR_LIST, this might result in allocating more memory than is
2490	 strictly needed, but it simplifies the code.  */
2491      if (XEXP (x, 1))
2492	{
2493	  new = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2494	  if (new != XEXP (x, 1))
2495	    return
2496	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2497	}
2498      return x;
2499
2500    case PRE_INC:
2501    case POST_INC:
2502    case PRE_DEC:
2503    case POST_DEC:
2504    case STRICT_LOW_PART:
2505    case NEG:          case NOT:
2506    case SIGN_EXTEND:  case ZERO_EXTEND:
2507    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2508    case FLOAT:        case FIX:
2509    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2510    case ABS:
2511    case SQRT:
2512    case FFS:
2513    case CLZ:
2514    case CTZ:
2515    case POPCOUNT:
2516    case PARITY:
2517      new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2518      if (new != XEXP (x, 0))
2519	return gen_rtx_fmt_e (code, GET_MODE (x), new);
2520      return x;
2521
2522    case SUBREG:
2523      /* Similar to above processing, but preserve SUBREG_BYTE.
2524	 Convert (subreg (mem)) to (mem) if not paradoxical.
2525	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2526	 pseudo didn't get a hard reg, we must replace this with the
2527	 eliminated version of the memory location because push_reload
2528	 may do the replacement in certain circumstances.  */
2529      if (REG_P (SUBREG_REG (x))
2530	  && (GET_MODE_SIZE (GET_MODE (x))
2531	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2532	  && reg_equiv_memory_loc != 0
2533	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2534	{
2535	  new = SUBREG_REG (x);
2536	}
2537      else
2538	new = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2539
2540      if (new != SUBREG_REG (x))
2541	{
2542	  int x_size = GET_MODE_SIZE (GET_MODE (x));
2543	  int new_size = GET_MODE_SIZE (GET_MODE (new));
2544
2545	  if (MEM_P (new)
2546	      && ((x_size < new_size
2547#ifdef WORD_REGISTER_OPERATIONS
2548		   /* On these machines, combine can create rtl of the form
2549		      (set (subreg:m1 (reg:m2 R) 0) ...)
2550		      where m1 < m2, and expects something interesting to
2551		      happen to the entire word.  Moreover, it will use the
2552		      (reg:m2 R) later, expecting all bits to be preserved.
2553		      So if the number of words is the same, preserve the
2554		      subreg so that push_reload can see it.  */
2555		   && ! ((x_size - 1) / UNITS_PER_WORD
2556			 == (new_size -1 ) / UNITS_PER_WORD)
2557#endif
2558		   )
2559		  || x_size == new_size)
2560	      )
2561	    return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2562	  else
2563	    return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2564	}
2565
2566      return x;
2567
2568    case MEM:
2569      /* Our only special processing is to pass the mode of the MEM to our
2570	 recursive call and copy the flags.  While we are here, handle this
2571	 case more efficiently.  */
2572      return
2573	replace_equiv_address_nv (x,
2574				  eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2575						    insn, true));
2576
2577    case USE:
2578      /* Handle insn_list USE that a call to a pure function may generate.  */
2579      new = eliminate_regs_1 (XEXP (x, 0), 0, insn, false);
2580      if (new != XEXP (x, 0))
2581	return gen_rtx_USE (GET_MODE (x), new);
2582      return x;
2583
2584    case CLOBBER:
2585    case ASM_OPERANDS:
2586    case SET:
2587      gcc_unreachable ();
2588
2589    default:
2590      break;
2591    }
2592
2593  /* Process each of our operands recursively.  If any have changed, make a
2594     copy of the rtx.  */
2595  fmt = GET_RTX_FORMAT (code);
2596  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2597    {
2598      if (*fmt == 'e')
2599	{
2600	  new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2601	  if (new != XEXP (x, i) && ! copied)
2602	    {
2603	      rtx new_x = rtx_alloc (code);
2604	      memcpy (new_x, x, RTX_SIZE (code));
2605	      x = new_x;
2606	      copied = 1;
2607	    }
2608	  XEXP (x, i) = new;
2609	}
2610      else if (*fmt == 'E')
2611	{
2612	  int copied_vec = 0;
2613	  for (j = 0; j < XVECLEN (x, i); j++)
2614	    {
2615	      new = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2616	      if (new != XVECEXP (x, i, j) && ! copied_vec)
2617		{
2618		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2619					     XVEC (x, i)->elem);
2620		  if (! copied)
2621		    {
2622		      rtx new_x = rtx_alloc (code);
2623		      memcpy (new_x, x, RTX_SIZE (code));
2624		      x = new_x;
2625		      copied = 1;
2626		    }
2627		  XVEC (x, i) = new_v;
2628		  copied_vec = 1;
2629		}
2630	      XVECEXP (x, i, j) = new;
2631	    }
2632	}
2633    }
2634
2635  return x;
2636}
2637
2638rtx
2639eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2640{
2641  return eliminate_regs_1 (x, mem_mode, insn, false);
2642}
2643
2644/* Scan rtx X for modifications of elimination target registers.  Update
2645   the table of eliminables to reflect the changed state.  MEM_MODE is
2646   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2647
2648static void
2649elimination_effects (rtx x, enum machine_mode mem_mode)
2650{
2651  enum rtx_code code = GET_CODE (x);
2652  struct elim_table *ep;
2653  int regno;
2654  int i, j;
2655  const char *fmt;
2656
2657  switch (code)
2658    {
2659    case CONST_INT:
2660    case CONST_DOUBLE:
2661    case CONST_VECTOR:
2662    case CONST:
2663    case SYMBOL_REF:
2664    case CODE_LABEL:
2665    case PC:
2666    case CC0:
2667    case ASM_INPUT:
2668    case ADDR_VEC:
2669    case ADDR_DIFF_VEC:
2670    case RETURN:
2671      return;
2672
2673    case REG:
2674      regno = REGNO (x);
2675
2676      /* First handle the case where we encounter a bare register that
2677	 is eliminable.  Replace it with a PLUS.  */
2678      if (regno < FIRST_PSEUDO_REGISTER)
2679	{
2680	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2681	       ep++)
2682	    if (ep->from_rtx == x && ep->can_eliminate)
2683	      {
2684		if (! mem_mode)
2685		  ep->ref_outside_mem = 1;
2686		return;
2687	      }
2688
2689	}
2690      else if (reg_renumber[regno] < 0 && reg_equiv_constant
2691	       && reg_equiv_constant[regno]
2692	       && ! function_invariant_p (reg_equiv_constant[regno]))
2693	elimination_effects (reg_equiv_constant[regno], mem_mode);
2694      return;
2695
2696    case PRE_INC:
2697    case POST_INC:
2698    case PRE_DEC:
2699    case POST_DEC:
2700    case POST_MODIFY:
2701    case PRE_MODIFY:
2702      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2703	if (ep->to_rtx == XEXP (x, 0))
2704	  {
2705	    int size = GET_MODE_SIZE (mem_mode);
2706
2707	    /* If more bytes than MEM_MODE are pushed, account for them.  */
2708#ifdef PUSH_ROUNDING
2709	    if (ep->to_rtx == stack_pointer_rtx)
2710	      size = PUSH_ROUNDING (size);
2711#endif
2712	    if (code == PRE_DEC || code == POST_DEC)
2713	      ep->offset += size;
2714	    else if (code == PRE_INC || code == POST_INC)
2715	      ep->offset -= size;
2716	    else if ((code == PRE_MODIFY || code == POST_MODIFY)
2717		     && GET_CODE (XEXP (x, 1)) == PLUS
2718		     && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2719		     && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2720	      ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2721	  }
2722
2723      /* These two aren't unary operators.  */
2724      if (code == POST_MODIFY || code == PRE_MODIFY)
2725	break;
2726
2727      /* Fall through to generic unary operation case.  */
2728    case STRICT_LOW_PART:
2729    case NEG:          case NOT:
2730    case SIGN_EXTEND:  case ZERO_EXTEND:
2731    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2732    case FLOAT:        case FIX:
2733    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2734    case ABS:
2735    case SQRT:
2736    case FFS:
2737    case CLZ:
2738    case CTZ:
2739    case POPCOUNT:
2740    case PARITY:
2741      elimination_effects (XEXP (x, 0), mem_mode);
2742      return;
2743
2744    case SUBREG:
2745      if (REG_P (SUBREG_REG (x))
2746	  && (GET_MODE_SIZE (GET_MODE (x))
2747	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2748	  && reg_equiv_memory_loc != 0
2749	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2750	return;
2751
2752      elimination_effects (SUBREG_REG (x), mem_mode);
2753      return;
2754
2755    case USE:
2756      /* If using a register that is the source of an eliminate we still
2757	 think can be performed, note it cannot be performed since we don't
2758	 know how this register is used.  */
2759      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2760	if (ep->from_rtx == XEXP (x, 0))
2761	  ep->can_eliminate = 0;
2762
2763      elimination_effects (XEXP (x, 0), mem_mode);
2764      return;
2765
2766    case CLOBBER:
2767      /* If clobbering a register that is the replacement register for an
2768	 elimination we still think can be performed, note that it cannot
2769	 be performed.  Otherwise, we need not be concerned about it.  */
2770      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2771	if (ep->to_rtx == XEXP (x, 0))
2772	  ep->can_eliminate = 0;
2773
2774      elimination_effects (XEXP (x, 0), mem_mode);
2775      return;
2776
2777    case SET:
2778      /* Check for setting a register that we know about.  */
2779      if (REG_P (SET_DEST (x)))
2780	{
2781	  /* See if this is setting the replacement register for an
2782	     elimination.
2783
2784	     If DEST is the hard frame pointer, we do nothing because we
2785	     assume that all assignments to the frame pointer are for
2786	     non-local gotos and are being done at a time when they are valid
2787	     and do not disturb anything else.  Some machines want to
2788	     eliminate a fake argument pointer (or even a fake frame pointer)
2789	     with either the real frame or the stack pointer.  Assignments to
2790	     the hard frame pointer must not prevent this elimination.  */
2791
2792	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2793	       ep++)
2794	    if (ep->to_rtx == SET_DEST (x)
2795		&& SET_DEST (x) != hard_frame_pointer_rtx)
2796	      {
2797		/* If it is being incremented, adjust the offset.  Otherwise,
2798		   this elimination can't be done.  */
2799		rtx src = SET_SRC (x);
2800
2801		if (GET_CODE (src) == PLUS
2802		    && XEXP (src, 0) == SET_DEST (x)
2803		    && GET_CODE (XEXP (src, 1)) == CONST_INT)
2804		  ep->offset -= INTVAL (XEXP (src, 1));
2805		else
2806		  ep->can_eliminate = 0;
2807	      }
2808	}
2809
2810      elimination_effects (SET_DEST (x), 0);
2811      elimination_effects (SET_SRC (x), 0);
2812      return;
2813
2814    case MEM:
2815      /* Our only special processing is to pass the mode of the MEM to our
2816	 recursive call.  */
2817      elimination_effects (XEXP (x, 0), GET_MODE (x));
2818      return;
2819
2820    default:
2821      break;
2822    }
2823
2824  fmt = GET_RTX_FORMAT (code);
2825  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2826    {
2827      if (*fmt == 'e')
2828	elimination_effects (XEXP (x, i), mem_mode);
2829      else if (*fmt == 'E')
2830	for (j = 0; j < XVECLEN (x, i); j++)
2831	  elimination_effects (XVECEXP (x, i, j), mem_mode);
2832    }
2833}
2834
2835/* Descend through rtx X and verify that no references to eliminable registers
2836   remain.  If any do remain, mark the involved register as not
2837   eliminable.  */
2838
2839static void
2840check_eliminable_occurrences (rtx x)
2841{
2842  const char *fmt;
2843  int i;
2844  enum rtx_code code;
2845
2846  if (x == 0)
2847    return;
2848
2849  code = GET_CODE (x);
2850
2851  if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2852    {
2853      struct elim_table *ep;
2854
2855      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2856	if (ep->from_rtx == x)
2857	  ep->can_eliminate = 0;
2858      return;
2859    }
2860
2861  fmt = GET_RTX_FORMAT (code);
2862  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2863    {
2864      if (*fmt == 'e')
2865	check_eliminable_occurrences (XEXP (x, i));
2866      else if (*fmt == 'E')
2867	{
2868	  int j;
2869	  for (j = 0; j < XVECLEN (x, i); j++)
2870	    check_eliminable_occurrences (XVECEXP (x, i, j));
2871	}
2872    }
2873}
2874
2875/* Scan INSN and eliminate all eliminable registers in it.
2876
2877   If REPLACE is nonzero, do the replacement destructively.  Also
2878   delete the insn as dead it if it is setting an eliminable register.
2879
2880   If REPLACE is zero, do all our allocations in reload_obstack.
2881
2882   If no eliminations were done and this insn doesn't require any elimination
2883   processing (these are not identical conditions: it might be updating sp,
2884   but not referencing fp; this needs to be seen during reload_as_needed so
2885   that the offset between fp and sp can be taken into consideration), zero
2886   is returned.  Otherwise, 1 is returned.  */
2887
2888static int
2889eliminate_regs_in_insn (rtx insn, int replace)
2890{
2891  int icode = recog_memoized (insn);
2892  rtx old_body = PATTERN (insn);
2893  int insn_is_asm = asm_noperands (old_body) >= 0;
2894  rtx old_set = single_set (insn);
2895  rtx new_body;
2896  int val = 0;
2897  int i;
2898  rtx substed_operand[MAX_RECOG_OPERANDS];
2899  rtx orig_operand[MAX_RECOG_OPERANDS];
2900  struct elim_table *ep;
2901  rtx plus_src, plus_cst_src;
2902
2903  if (! insn_is_asm && icode < 0)
2904    {
2905      gcc_assert (GET_CODE (PATTERN (insn)) == USE
2906		  || GET_CODE (PATTERN (insn)) == CLOBBER
2907		  || GET_CODE (PATTERN (insn)) == ADDR_VEC
2908		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2909		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2910      return 0;
2911    }
2912
2913  if (old_set != 0 && REG_P (SET_DEST (old_set))
2914      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2915    {
2916      /* Check for setting an eliminable register.  */
2917      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2918	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2919	  {
2920#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2921	    /* If this is setting the frame pointer register to the
2922	       hardware frame pointer register and this is an elimination
2923	       that will be done (tested above), this insn is really
2924	       adjusting the frame pointer downward to compensate for
2925	       the adjustment done before a nonlocal goto.  */
2926	    if (ep->from == FRAME_POINTER_REGNUM
2927		&& ep->to == HARD_FRAME_POINTER_REGNUM)
2928	      {
2929		rtx base = SET_SRC (old_set);
2930		rtx base_insn = insn;
2931		HOST_WIDE_INT offset = 0;
2932
2933		while (base != ep->to_rtx)
2934		  {
2935		    rtx prev_insn, prev_set;
2936
2937		    if (GET_CODE (base) == PLUS
2938		        && GET_CODE (XEXP (base, 1)) == CONST_INT)
2939		      {
2940		        offset += INTVAL (XEXP (base, 1));
2941		        base = XEXP (base, 0);
2942		      }
2943		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2944			     && (prev_set = single_set (prev_insn)) != 0
2945			     && rtx_equal_p (SET_DEST (prev_set), base))
2946		      {
2947		        base = SET_SRC (prev_set);
2948		        base_insn = prev_insn;
2949		      }
2950		    else
2951		      break;
2952		  }
2953
2954		if (base == ep->to_rtx)
2955		  {
2956		    rtx src
2957		      = plus_constant (ep->to_rtx, offset - ep->offset);
2958
2959		    new_body = old_body;
2960		    if (! replace)
2961		      {
2962			new_body = copy_insn (old_body);
2963			if (REG_NOTES (insn))
2964			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
2965		      }
2966		    PATTERN (insn) = new_body;
2967		    old_set = single_set (insn);
2968
2969		    /* First see if this insn remains valid when we
2970		       make the change.  If not, keep the INSN_CODE
2971		       the same and let reload fit it up.  */
2972		    validate_change (insn, &SET_SRC (old_set), src, 1);
2973		    validate_change (insn, &SET_DEST (old_set),
2974				     ep->to_rtx, 1);
2975		    if (! apply_change_group ())
2976		      {
2977			SET_SRC (old_set) = src;
2978			SET_DEST (old_set) = ep->to_rtx;
2979		      }
2980
2981		    val = 1;
2982		    goto done;
2983		  }
2984	      }
2985#endif
2986
2987	    /* In this case this insn isn't serving a useful purpose.  We
2988	       will delete it in reload_as_needed once we know that this
2989	       elimination is, in fact, being done.
2990
2991	       If REPLACE isn't set, we can't delete this insn, but needn't
2992	       process it since it won't be used unless something changes.  */
2993	    if (replace)
2994	      {
2995		delete_dead_insn (insn);
2996		return 1;
2997	      }
2998	    val = 1;
2999	    goto done;
3000	  }
3001    }
3002
3003  /* We allow one special case which happens to work on all machines we
3004     currently support: a single set with the source or a REG_EQUAL
3005     note being a PLUS of an eliminable register and a constant.  */
3006  plus_src = plus_cst_src = 0;
3007  if (old_set && REG_P (SET_DEST (old_set)))
3008    {
3009      if (GET_CODE (SET_SRC (old_set)) == PLUS)
3010	plus_src = SET_SRC (old_set);
3011      /* First see if the source is of the form (plus (...) CST).  */
3012      if (plus_src
3013	  && GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
3014	plus_cst_src = plus_src;
3015      else if (REG_P (SET_SRC (old_set))
3016	       || plus_src)
3017	{
3018	  /* Otherwise, see if we have a REG_EQUAL note of the form
3019	     (plus (...) CST).  */
3020	  rtx links;
3021	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3022	    {
3023	      if (REG_NOTE_KIND (links) == REG_EQUAL
3024		  && GET_CODE (XEXP (links, 0)) == PLUS
3025		  && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
3026		{
3027		  plus_cst_src = XEXP (links, 0);
3028		  break;
3029		}
3030	    }
3031	}
3032
3033      /* Check that the first operand of the PLUS is a hard reg or
3034	 the lowpart subreg of one.  */
3035      if (plus_cst_src)
3036	{
3037	  rtx reg = XEXP (plus_cst_src, 0);
3038	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3039	    reg = SUBREG_REG (reg);
3040
3041	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3042	    plus_cst_src = 0;
3043	}
3044    }
3045  if (plus_cst_src)
3046    {
3047      rtx reg = XEXP (plus_cst_src, 0);
3048      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3049
3050      if (GET_CODE (reg) == SUBREG)
3051	reg = SUBREG_REG (reg);
3052
3053      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3054	if (ep->from_rtx == reg && ep->can_eliminate)
3055	  {
3056	    rtx to_rtx = ep->to_rtx;
3057	    offset += ep->offset;
3058	    offset = trunc_int_for_mode (offset, GET_MODE (reg));
3059
3060	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3061	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3062				    to_rtx);
3063	    /* If we have a nonzero offset, and the source is already
3064	       a simple REG, the following transformation would
3065	       increase the cost of the insn by replacing a simple REG
3066	       with (plus (reg sp) CST).  So try only when we already
3067	       had a PLUS before.  */
3068	    if (offset == 0 || plus_src)
3069	      {
3070		rtx new_src = plus_constant (to_rtx, offset);
3071
3072		new_body = old_body;
3073		if (! replace)
3074		  {
3075		    new_body = copy_insn (old_body);
3076		    if (REG_NOTES (insn))
3077		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3078		  }
3079		PATTERN (insn) = new_body;
3080		old_set = single_set (insn);
3081
3082		/* First see if this insn remains valid when we make the
3083		   change.  If not, try to replace the whole pattern with
3084		   a simple set (this may help if the original insn was a
3085		   PARALLEL that was only recognized as single_set due to
3086		   REG_UNUSED notes).  If this isn't valid either, keep
3087		   the INSN_CODE the same and let reload fix it up.  */
3088		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3089		  {
3090		    rtx new_pat = gen_rtx_SET (VOIDmode,
3091					       SET_DEST (old_set), new_src);
3092
3093		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3094		      SET_SRC (old_set) = new_src;
3095		  }
3096	      }
3097	    else
3098	      break;
3099
3100	    val = 1;
3101	    /* This can't have an effect on elimination offsets, so skip right
3102	       to the end.  */
3103	    goto done;
3104	  }
3105    }
3106
3107  /* Determine the effects of this insn on elimination offsets.  */
3108  elimination_effects (old_body, 0);
3109
3110  /* Eliminate all eliminable registers occurring in operands that
3111     can be handled by reload.  */
3112  extract_insn (insn);
3113  for (i = 0; i < recog_data.n_operands; i++)
3114    {
3115      orig_operand[i] = recog_data.operand[i];
3116      substed_operand[i] = recog_data.operand[i];
3117
3118      /* For an asm statement, every operand is eliminable.  */
3119      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3120	{
3121	  bool is_set_src, in_plus;
3122
3123	  /* Check for setting a register that we know about.  */
3124	  if (recog_data.operand_type[i] != OP_IN
3125	      && REG_P (orig_operand[i]))
3126	    {
3127	      /* If we are assigning to a register that can be eliminated, it
3128		 must be as part of a PARALLEL, since the code above handles
3129		 single SETs.  We must indicate that we can no longer
3130		 eliminate this reg.  */
3131	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3132		   ep++)
3133		if (ep->from_rtx == orig_operand[i])
3134		  ep->can_eliminate = 0;
3135	    }
3136
3137	  /* Companion to the above plus substitution, we can allow
3138	     invariants as the source of a plain move.  */
3139	  is_set_src = false;
3140	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3141	    is_set_src = true;
3142	  in_plus = false;
3143	  if (plus_src
3144	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3145		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3146	    in_plus = true;
3147
3148	  substed_operand[i]
3149	    = eliminate_regs_1 (recog_data.operand[i], 0,
3150			        replace ? insn : NULL_RTX,
3151				is_set_src || in_plus);
3152	  if (substed_operand[i] != orig_operand[i])
3153	    val = 1;
3154	  /* Terminate the search in check_eliminable_occurrences at
3155	     this point.  */
3156	  *recog_data.operand_loc[i] = 0;
3157
3158	/* If an output operand changed from a REG to a MEM and INSN is an
3159	   insn, write a CLOBBER insn.  */
3160	  if (recog_data.operand_type[i] != OP_IN
3161	      && REG_P (orig_operand[i])
3162	      && MEM_P (substed_operand[i])
3163	      && replace)
3164	    emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3165			     insn);
3166	}
3167    }
3168
3169  for (i = 0; i < recog_data.n_dups; i++)
3170    *recog_data.dup_loc[i]
3171      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3172
3173  /* If any eliminable remain, they aren't eliminable anymore.  */
3174  check_eliminable_occurrences (old_body);
3175
3176  /* Substitute the operands; the new values are in the substed_operand
3177     array.  */
3178  for (i = 0; i < recog_data.n_operands; i++)
3179    *recog_data.operand_loc[i] = substed_operand[i];
3180  for (i = 0; i < recog_data.n_dups; i++)
3181    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3182
3183  /* If we are replacing a body that was a (set X (plus Y Z)), try to
3184     re-recognize the insn.  We do this in case we had a simple addition
3185     but now can do this as a load-address.  This saves an insn in this
3186     common case.
3187     If re-recognition fails, the old insn code number will still be used,
3188     and some register operands may have changed into PLUS expressions.
3189     These will be handled by find_reloads by loading them into a register
3190     again.  */
3191
3192  if (val)
3193    {
3194      /* If we aren't replacing things permanently and we changed something,
3195	 make another copy to ensure that all the RTL is new.  Otherwise
3196	 things can go wrong if find_reload swaps commutative operands
3197	 and one is inside RTL that has been copied while the other is not.  */
3198      new_body = old_body;
3199      if (! replace)
3200	{
3201	  new_body = copy_insn (old_body);
3202	  if (REG_NOTES (insn))
3203	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3204	}
3205      PATTERN (insn) = new_body;
3206
3207      /* If we had a move insn but now we don't, rerecognize it.  This will
3208	 cause spurious re-recognition if the old move had a PARALLEL since
3209	 the new one still will, but we can't call single_set without
3210	 having put NEW_BODY into the insn and the re-recognition won't
3211	 hurt in this rare case.  */
3212      /* ??? Why this huge if statement - why don't we just rerecognize the
3213	 thing always?  */
3214      if (! insn_is_asm
3215	  && old_set != 0
3216	  && ((REG_P (SET_SRC (old_set))
3217	       && (GET_CODE (new_body) != SET
3218		   || !REG_P (SET_SRC (new_body))))
3219	      /* If this was a load from or store to memory, compare
3220		 the MEM in recog_data.operand to the one in the insn.
3221		 If they are not equal, then rerecognize the insn.  */
3222	      || (old_set != 0
3223		  && ((MEM_P (SET_SRC (old_set))
3224		       && SET_SRC (old_set) != recog_data.operand[1])
3225		      || (MEM_P (SET_DEST (old_set))
3226			  && SET_DEST (old_set) != recog_data.operand[0])))
3227	      /* If this was an add insn before, rerecognize.  */
3228	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3229	{
3230	  int new_icode = recog (PATTERN (insn), insn, 0);
3231	  if (new_icode >= 0)
3232	    INSN_CODE (insn) = new_icode;
3233	}
3234    }
3235
3236  /* Restore the old body.  If there were any changes to it, we made a copy
3237     of it while the changes were still in place, so we'll correctly return
3238     a modified insn below.  */
3239  if (! replace)
3240    {
3241      /* Restore the old body.  */
3242      for (i = 0; i < recog_data.n_operands; i++)
3243	*recog_data.operand_loc[i] = orig_operand[i];
3244      for (i = 0; i < recog_data.n_dups; i++)
3245	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3246    }
3247
3248  /* Update all elimination pairs to reflect the status after the current
3249     insn.  The changes we make were determined by the earlier call to
3250     elimination_effects.
3251
3252     We also detect cases where register elimination cannot be done,
3253     namely, if a register would be both changed and referenced outside a MEM
3254     in the resulting insn since such an insn is often undefined and, even if
3255     not, we cannot know what meaning will be given to it.  Note that it is
3256     valid to have a register used in an address in an insn that changes it
3257     (presumably with a pre- or post-increment or decrement).
3258
3259     If anything changes, return nonzero.  */
3260
3261  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3262    {
3263      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3264	ep->can_eliminate = 0;
3265
3266      ep->ref_outside_mem = 0;
3267
3268      if (ep->previous_offset != ep->offset)
3269	val = 1;
3270    }
3271
3272 done:
3273  /* If we changed something, perform elimination in REG_NOTES.  This is
3274     needed even when REPLACE is zero because a REG_DEAD note might refer
3275     to a register that we eliminate and could cause a different number
3276     of spill registers to be needed in the final reload pass than in
3277     the pre-passes.  */
3278  if (val && REG_NOTES (insn) != 0)
3279    REG_NOTES (insn)
3280      = eliminate_regs_1 (REG_NOTES (insn), 0, REG_NOTES (insn), true);
3281
3282  return val;
3283}
3284
3285/* Loop through all elimination pairs.
3286   Recalculate the number not at initial offset.
3287
3288   Compute the maximum offset (minimum offset if the stack does not
3289   grow downward) for each elimination pair.  */
3290
3291static void
3292update_eliminable_offsets (void)
3293{
3294  struct elim_table *ep;
3295
3296  num_not_at_initial_offset = 0;
3297  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3298    {
3299      ep->previous_offset = ep->offset;
3300      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3301	num_not_at_initial_offset++;
3302    }
3303}
3304
3305/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3306   replacement we currently believe is valid, mark it as not eliminable if X
3307   modifies DEST in any way other than by adding a constant integer to it.
3308
3309   If DEST is the frame pointer, we do nothing because we assume that
3310   all assignments to the hard frame pointer are nonlocal gotos and are being
3311   done at a time when they are valid and do not disturb anything else.
3312   Some machines want to eliminate a fake argument pointer with either the
3313   frame or stack pointer.  Assignments to the hard frame pointer must not
3314   prevent this elimination.
3315
3316   Called via note_stores from reload before starting its passes to scan
3317   the insns of the function.  */
3318
3319static void
3320mark_not_eliminable (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
3321{
3322  unsigned int i;
3323
3324  /* A SUBREG of a hard register here is just changing its mode.  We should
3325     not see a SUBREG of an eliminable hard register, but check just in
3326     case.  */
3327  if (GET_CODE (dest) == SUBREG)
3328    dest = SUBREG_REG (dest);
3329
3330  if (dest == hard_frame_pointer_rtx)
3331    return;
3332
3333  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3334    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3335	&& (GET_CODE (x) != SET
3336	    || GET_CODE (SET_SRC (x)) != PLUS
3337	    || XEXP (SET_SRC (x), 0) != dest
3338	    || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3339      {
3340	reg_eliminate[i].can_eliminate_previous
3341	  = reg_eliminate[i].can_eliminate = 0;
3342	num_eliminable--;
3343      }
3344}
3345
3346/* Verify that the initial elimination offsets did not change since the
3347   last call to set_initial_elim_offsets.  This is used to catch cases
3348   where something illegal happened during reload_as_needed that could
3349   cause incorrect code to be generated if we did not check for it.  */
3350
3351static bool
3352verify_initial_elim_offsets (void)
3353{
3354  HOST_WIDE_INT t;
3355
3356  if (!num_eliminable)
3357    return true;
3358
3359#ifdef ELIMINABLE_REGS
3360  {
3361   struct elim_table *ep;
3362
3363   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3364     {
3365       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3366       if (t != ep->initial_offset)
3367	 return false;
3368     }
3369  }
3370#else
3371  INITIAL_FRAME_POINTER_OFFSET (t);
3372  if (t != reg_eliminate[0].initial_offset)
3373    return false;
3374#endif
3375
3376  return true;
3377}
3378
3379/* Reset all offsets on eliminable registers to their initial values.  */
3380
3381static void
3382set_initial_elim_offsets (void)
3383{
3384  struct elim_table *ep = reg_eliminate;
3385
3386#ifdef ELIMINABLE_REGS
3387  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3388    {
3389      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3390      ep->previous_offset = ep->offset = ep->initial_offset;
3391    }
3392#else
3393  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3394  ep->previous_offset = ep->offset = ep->initial_offset;
3395#endif
3396
3397  num_not_at_initial_offset = 0;
3398}
3399
3400/* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3401
3402static void
3403set_initial_eh_label_offset (rtx label)
3404{
3405  set_label_offsets (label, NULL_RTX, 1);
3406}
3407
3408/* Initialize the known label offsets.
3409   Set a known offset for each forced label to be at the initial offset
3410   of each elimination.  We do this because we assume that all
3411   computed jumps occur from a location where each elimination is
3412   at its initial offset.
3413   For all other labels, show that we don't know the offsets.  */
3414
3415static void
3416set_initial_label_offsets (void)
3417{
3418  rtx x;
3419  memset (offsets_known_at, 0, num_labels);
3420
3421  for (x = forced_labels; x; x = XEXP (x, 1))
3422    if (XEXP (x, 0))
3423      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3424
3425  for_each_eh_label (set_initial_eh_label_offset);
3426}
3427
3428/* Set all elimination offsets to the known values for the code label given
3429   by INSN.  */
3430
3431static void
3432set_offsets_for_label (rtx insn)
3433{
3434  unsigned int i;
3435  int label_nr = CODE_LABEL_NUMBER (insn);
3436  struct elim_table *ep;
3437
3438  num_not_at_initial_offset = 0;
3439  for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3440    {
3441      ep->offset = ep->previous_offset
3442		 = offsets_at[label_nr - first_label_num][i];
3443      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3444	num_not_at_initial_offset++;
3445    }
3446}
3447
3448/* See if anything that happened changes which eliminations are valid.
3449   For example, on the SPARC, whether or not the frame pointer can
3450   be eliminated can depend on what registers have been used.  We need
3451   not check some conditions again (such as flag_omit_frame_pointer)
3452   since they can't have changed.  */
3453
3454static void
3455update_eliminables (HARD_REG_SET *pset)
3456{
3457  int previous_frame_pointer_needed = frame_pointer_needed;
3458  struct elim_table *ep;
3459
3460  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3461    if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3462#ifdef ELIMINABLE_REGS
3463	|| ! CAN_ELIMINATE (ep->from, ep->to)
3464#endif
3465	)
3466      ep->can_eliminate = 0;
3467
3468  /* Look for the case where we have discovered that we can't replace
3469     register A with register B and that means that we will now be
3470     trying to replace register A with register C.  This means we can
3471     no longer replace register C with register B and we need to disable
3472     such an elimination, if it exists.  This occurs often with A == ap,
3473     B == sp, and C == fp.  */
3474
3475  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3476    {
3477      struct elim_table *op;
3478      int new_to = -1;
3479
3480      if (! ep->can_eliminate && ep->can_eliminate_previous)
3481	{
3482	  /* Find the current elimination for ep->from, if there is a
3483	     new one.  */
3484	  for (op = reg_eliminate;
3485	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3486	    if (op->from == ep->from && op->can_eliminate)
3487	      {
3488		new_to = op->to;
3489		break;
3490	      }
3491
3492	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3493	     disable it.  */
3494	  for (op = reg_eliminate;
3495	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3496	    if (op->from == new_to && op->to == ep->to)
3497	      op->can_eliminate = 0;
3498	}
3499    }
3500
3501  /* See if any registers that we thought we could eliminate the previous
3502     time are no longer eliminable.  If so, something has changed and we
3503     must spill the register.  Also, recompute the number of eliminable
3504     registers and see if the frame pointer is needed; it is if there is
3505     no elimination of the frame pointer that we can perform.  */
3506
3507  frame_pointer_needed = 1;
3508  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3509    {
3510      if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3511	  && ep->to != HARD_FRAME_POINTER_REGNUM)
3512	frame_pointer_needed = 0;
3513
3514      if (! ep->can_eliminate && ep->can_eliminate_previous)
3515	{
3516	  ep->can_eliminate_previous = 0;
3517	  SET_HARD_REG_BIT (*pset, ep->from);
3518	  num_eliminable--;
3519	}
3520    }
3521
3522  /* If we didn't need a frame pointer last time, but we do now, spill
3523     the hard frame pointer.  */
3524  if (frame_pointer_needed && ! previous_frame_pointer_needed)
3525    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3526}
3527
3528/* Initialize the table of registers to eliminate.  */
3529
3530static void
3531init_elim_table (void)
3532{
3533  struct elim_table *ep;
3534#ifdef ELIMINABLE_REGS
3535  const struct elim_table_1 *ep1;
3536#endif
3537
3538  if (!reg_eliminate)
3539    reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3540
3541  /* Does this function require a frame pointer?  */
3542
3543  frame_pointer_needed = (! flag_omit_frame_pointer
3544			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
3545			     and restore sp for alloca.  So we can't eliminate
3546			     the frame pointer in that case.  At some point,
3547			     we should improve this by emitting the
3548			     sp-adjusting insns for this case.  */
3549			  || (current_function_calls_alloca
3550			      && EXIT_IGNORE_STACK)
3551			  || current_function_accesses_prior_frames
3552			  || FRAME_POINTER_REQUIRED);
3553
3554  num_eliminable = 0;
3555
3556#ifdef ELIMINABLE_REGS
3557  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3558       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3559    {
3560      ep->from = ep1->from;
3561      ep->to = ep1->to;
3562      ep->can_eliminate = ep->can_eliminate_previous
3563	= (CAN_ELIMINATE (ep->from, ep->to)
3564	   && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3565    }
3566#else
3567  reg_eliminate[0].from = reg_eliminate_1[0].from;
3568  reg_eliminate[0].to = reg_eliminate_1[0].to;
3569  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3570    = ! frame_pointer_needed;
3571#endif
3572
3573  /* Count the number of eliminable registers and build the FROM and TO
3574     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
3575     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3576     We depend on this.  */
3577  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3578    {
3579      num_eliminable += ep->can_eliminate;
3580      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3581      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3582    }
3583}
3584
3585/* Kick all pseudos out of hard register REGNO.
3586
3587   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3588   because we found we can't eliminate some register.  In the case, no pseudos
3589   are allowed to be in the register, even if they are only in a block that
3590   doesn't require spill registers, unlike the case when we are spilling this
3591   hard reg to produce another spill register.
3592
3593   Return nonzero if any pseudos needed to be kicked out.  */
3594
3595static void
3596spill_hard_reg (unsigned int regno, int cant_eliminate)
3597{
3598  int i;
3599
3600  if (cant_eliminate)
3601    {
3602      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3603      regs_ever_live[regno] = 1;
3604    }
3605
3606  /* Spill every pseudo reg that was allocated to this reg
3607     or to something that overlaps this reg.  */
3608
3609  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3610    if (reg_renumber[i] >= 0
3611	&& (unsigned int) reg_renumber[i] <= regno
3612	&& ((unsigned int) reg_renumber[i]
3613	    + hard_regno_nregs[(unsigned int) reg_renumber[i]]
3614			      [PSEUDO_REGNO_MODE (i)]
3615	    > regno))
3616      SET_REGNO_REG_SET (&spilled_pseudos, i);
3617}
3618
3619/* After find_reload_regs has been run for all insn that need reloads,
3620   and/or spill_hard_regs was called, this function is used to actually
3621   spill pseudo registers and try to reallocate them.  It also sets up the
3622   spill_regs array for use by choose_reload_regs.  */
3623
3624static int
3625finish_spills (int global)
3626{
3627  struct insn_chain *chain;
3628  int something_changed = 0;
3629  unsigned i;
3630  reg_set_iterator rsi;
3631
3632  /* Build the spill_regs array for the function.  */
3633  /* If there are some registers still to eliminate and one of the spill regs
3634     wasn't ever used before, additional stack space may have to be
3635     allocated to store this register.  Thus, we may have changed the offset
3636     between the stack and frame pointers, so mark that something has changed.
3637
3638     One might think that we need only set VAL to 1 if this is a call-used
3639     register.  However, the set of registers that must be saved by the
3640     prologue is not identical to the call-used set.  For example, the
3641     register used by the call insn for the return PC is a call-used register,
3642     but must be saved by the prologue.  */
3643
3644  n_spills = 0;
3645  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3646    if (TEST_HARD_REG_BIT (used_spill_regs, i))
3647      {
3648	spill_reg_order[i] = n_spills;
3649	spill_regs[n_spills++] = i;
3650	if (num_eliminable && ! regs_ever_live[i])
3651	  something_changed = 1;
3652	regs_ever_live[i] = 1;
3653      }
3654    else
3655      spill_reg_order[i] = -1;
3656
3657  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3658    {
3659      /* Record the current hard register the pseudo is allocated to in
3660	 pseudo_previous_regs so we avoid reallocating it to the same
3661	 hard reg in a later pass.  */
3662      gcc_assert (reg_renumber[i] >= 0);
3663
3664      SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3665      /* Mark it as no longer having a hard register home.  */
3666      reg_renumber[i] = -1;
3667      /* We will need to scan everything again.  */
3668      something_changed = 1;
3669    }
3670
3671  /* Retry global register allocation if possible.  */
3672  if (global)
3673    {
3674      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3675      /* For every insn that needs reloads, set the registers used as spill
3676	 regs in pseudo_forbidden_regs for every pseudo live across the
3677	 insn.  */
3678      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3679	{
3680	  EXECUTE_IF_SET_IN_REG_SET
3681	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
3682	    {
3683	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3684				chain->used_spill_regs);
3685	    }
3686	  EXECUTE_IF_SET_IN_REG_SET
3687	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
3688	    {
3689	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
3690				chain->used_spill_regs);
3691	    }
3692	}
3693
3694      /* Retry allocating the spilled pseudos.  For each reg, merge the
3695	 various reg sets that indicate which hard regs can't be used,
3696	 and call retry_global_alloc.
3697	 We change spill_pseudos here to only contain pseudos that did not
3698	 get a new hard register.  */
3699      for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3700	if (reg_old_renumber[i] != reg_renumber[i])
3701	  {
3702	    HARD_REG_SET forbidden;
3703	    COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3704	    IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3705	    IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3706	    retry_global_alloc (i, forbidden);
3707	    if (reg_renumber[i] >= 0)
3708	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3709	  }
3710    }
3711
3712  /* Fix up the register information in the insn chain.
3713     This involves deleting those of the spilled pseudos which did not get
3714     a new hard register home from the live_{before,after} sets.  */
3715  for (chain = reload_insn_chain; chain; chain = chain->next)
3716    {
3717      HARD_REG_SET used_by_pseudos;
3718      HARD_REG_SET used_by_pseudos2;
3719
3720      AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3721      AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3722
3723      /* Mark any unallocated hard regs as available for spills.  That
3724	 makes inheritance work somewhat better.  */
3725      if (chain->need_reload)
3726	{
3727	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3728	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3729	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3730
3731	  /* Save the old value for the sanity test below.  */
3732	  COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3733
3734	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3735	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3736	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3737	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3738
3739	  /* Make sure we only enlarge the set.  */
3740	  GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3741	  gcc_unreachable ();
3742	ok:;
3743	}
3744    }
3745
3746  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
3747  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
3748    {
3749      int regno = reg_renumber[i];
3750      if (reg_old_renumber[i] == regno)
3751	continue;
3752
3753      alter_reg (i, reg_old_renumber[i]);
3754      reg_old_renumber[i] = regno;
3755      if (dump_file)
3756	{
3757	  if (regno == -1)
3758	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
3759	  else
3760	    fprintf (dump_file, " Register %d now in %d.\n\n",
3761		     i, reg_renumber[i]);
3762	}
3763    }
3764
3765  return something_changed;
3766}
3767
3768/* Find all paradoxical subregs within X and update reg_max_ref_width.  */
3769
3770static void
3771scan_paradoxical_subregs (rtx x)
3772{
3773  int i;
3774  const char *fmt;
3775  enum rtx_code code = GET_CODE (x);
3776
3777  switch (code)
3778    {
3779    case REG:
3780    case CONST_INT:
3781    case CONST:
3782    case SYMBOL_REF:
3783    case LABEL_REF:
3784    case CONST_DOUBLE:
3785    case CONST_VECTOR: /* shouldn't happen, but just in case.  */
3786    case CC0:
3787    case PC:
3788    case USE:
3789    case CLOBBER:
3790      return;
3791
3792    case SUBREG:
3793      if (REG_P (SUBREG_REG (x))
3794	  && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3795	reg_max_ref_width[REGNO (SUBREG_REG (x))]
3796	  = GET_MODE_SIZE (GET_MODE (x));
3797      return;
3798
3799    default:
3800      break;
3801    }
3802
3803  fmt = GET_RTX_FORMAT (code);
3804  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3805    {
3806      if (fmt[i] == 'e')
3807	scan_paradoxical_subregs (XEXP (x, i));
3808      else if (fmt[i] == 'E')
3809	{
3810	  int j;
3811	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3812	    scan_paradoxical_subregs (XVECEXP (x, i, j));
3813	}
3814    }
3815}
3816
3817/* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
3818   examine all of the reload insns between PREV and NEXT exclusive, and
3819   annotate all that may trap.  */
3820
3821static void
3822fixup_eh_region_note (rtx insn, rtx prev, rtx next)
3823{
3824  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3825  unsigned int trap_count;
3826  rtx i;
3827
3828  if (note == NULL)
3829    return;
3830
3831  if (may_trap_p (PATTERN (insn)))
3832    trap_count = 1;
3833  else
3834    {
3835      remove_note (insn, note);
3836      trap_count = 0;
3837    }
3838
3839  for (i = NEXT_INSN (prev); i != next; i = NEXT_INSN (i))
3840    if (INSN_P (i) && i != insn && may_trap_p (PATTERN (i)))
3841      {
3842	trap_count++;
3843	REG_NOTES (i)
3844	  = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (i));
3845      }
3846}
3847
3848/* Reload pseudo-registers into hard regs around each insn as needed.
3849   Additional register load insns are output before the insn that needs it
3850   and perhaps store insns after insns that modify the reloaded pseudo reg.
3851
3852   reg_last_reload_reg and reg_reloaded_contents keep track of
3853   which registers are already available in reload registers.
3854   We update these for the reloads that we perform,
3855   as the insns are scanned.  */
3856
3857static void
3858reload_as_needed (int live_known)
3859{
3860  struct insn_chain *chain;
3861#if defined (AUTO_INC_DEC)
3862  int i;
3863#endif
3864  rtx x;
3865
3866  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
3867  memset (spill_reg_store, 0, sizeof spill_reg_store);
3868  reg_last_reload_reg = xcalloc (max_regno, sizeof (rtx));
3869  reg_has_output_reload = xmalloc (max_regno);
3870  CLEAR_HARD_REG_SET (reg_reloaded_valid);
3871  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
3872
3873  set_initial_elim_offsets ();
3874
3875  for (chain = reload_insn_chain; chain; chain = chain->next)
3876    {
3877      rtx prev = 0;
3878      rtx insn = chain->insn;
3879      rtx old_next = NEXT_INSN (insn);
3880
3881      /* If we pass a label, copy the offsets from the label information
3882	 into the current offsets of each elimination.  */
3883      if (LABEL_P (insn))
3884	set_offsets_for_label (insn);
3885
3886      else if (INSN_P (insn))
3887	{
3888	  rtx oldpat = copy_rtx (PATTERN (insn));
3889
3890	  /* If this is a USE and CLOBBER of a MEM, ensure that any
3891	     references to eliminable registers have been removed.  */
3892
3893	  if ((GET_CODE (PATTERN (insn)) == USE
3894	       || GET_CODE (PATTERN (insn)) == CLOBBER)
3895	      && MEM_P (XEXP (PATTERN (insn), 0)))
3896	    XEXP (XEXP (PATTERN (insn), 0), 0)
3897	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3898				GET_MODE (XEXP (PATTERN (insn), 0)),
3899				NULL_RTX);
3900
3901	  /* If we need to do register elimination processing, do so.
3902	     This might delete the insn, in which case we are done.  */
3903	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3904	    {
3905	      eliminate_regs_in_insn (insn, 1);
3906	      if (NOTE_P (insn))
3907		{
3908		  update_eliminable_offsets ();
3909		  continue;
3910		}
3911	    }
3912
3913	  /* If need_elim is nonzero but need_reload is zero, one might think
3914	     that we could simply set n_reloads to 0.  However, find_reloads
3915	     could have done some manipulation of the insn (such as swapping
3916	     commutative operands), and these manipulations are lost during
3917	     the first pass for every insn that needs register elimination.
3918	     So the actions of find_reloads must be redone here.  */
3919
3920	  if (! chain->need_elim && ! chain->need_reload
3921	      && ! chain->need_operand_change)
3922	    n_reloads = 0;
3923	  /* First find the pseudo regs that must be reloaded for this insn.
3924	     This info is returned in the tables reload_... (see reload.h).
3925	     Also modify the body of INSN by substituting RELOAD
3926	     rtx's for those pseudo regs.  */
3927	  else
3928	    {
3929	      memset (reg_has_output_reload, 0, max_regno);
3930	      CLEAR_HARD_REG_SET (reg_is_output_reload);
3931
3932	      find_reloads (insn, 1, spill_indirect_levels, live_known,
3933			    spill_reg_order);
3934	    }
3935
3936	  if (n_reloads > 0)
3937	    {
3938	      rtx next = NEXT_INSN (insn);
3939	      rtx p;
3940
3941	      prev = PREV_INSN (insn);
3942
3943	      /* Now compute which reload regs to reload them into.  Perhaps
3944		 reusing reload regs from previous insns, or else output
3945		 load insns to reload them.  Maybe output store insns too.
3946		 Record the choices of reload reg in reload_reg_rtx.  */
3947	      choose_reload_regs (chain);
3948
3949	      /* Merge any reloads that we didn't combine for fear of
3950		 increasing the number of spill registers needed but now
3951		 discover can be safely merged.  */
3952	      if (SMALL_REGISTER_CLASSES)
3953		merge_assigned_reloads (insn);
3954
3955	      /* Generate the insns to reload operands into or out of
3956		 their reload regs.  */
3957	      emit_reload_insns (chain);
3958
3959	      /* Substitute the chosen reload regs from reload_reg_rtx
3960		 into the insn's body (or perhaps into the bodies of other
3961		 load and store insn that we just made for reloading
3962		 and that we moved the structure into).  */
3963	      subst_reloads (insn);
3964
3965	      /* Adjust the exception region notes for loads and stores.  */
3966	      if (flag_non_call_exceptions && !CALL_P (insn))
3967		fixup_eh_region_note (insn, prev, next);
3968
3969	      /* If this was an ASM, make sure that all the reload insns
3970		 we have generated are valid.  If not, give an error
3971		 and delete them.  */
3972	      if (asm_noperands (PATTERN (insn)) >= 0)
3973		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3974		  if (p != insn && INSN_P (p)
3975		      && GET_CODE (PATTERN (p)) != USE
3976		      && (recog_memoized (p) < 0
3977			  || (extract_insn (p), ! constrain_operands (1))))
3978		    {
3979		      error_for_asm (insn,
3980				     "%<asm%> operand requires "
3981				     "impossible reload");
3982		      delete_insn (p);
3983		    }
3984	    }
3985
3986	  if (num_eliminable && chain->need_elim)
3987	    update_eliminable_offsets ();
3988
3989	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
3990	     is no longer validly lying around to save a future reload.
3991	     Note that this does not detect pseudos that were reloaded
3992	     for this insn in order to be stored in
3993	     (obeying register constraints).  That is correct; such reload
3994	     registers ARE still valid.  */
3995	  note_stores (oldpat, forget_old_reloads_1, NULL);
3996
3997	  /* There may have been CLOBBER insns placed after INSN.  So scan
3998	     between INSN and NEXT and use them to forget old reloads.  */
3999	  for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4000	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4001	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4002
4003#ifdef AUTO_INC_DEC
4004	  /* Likewise for regs altered by auto-increment in this insn.
4005	     REG_INC notes have been changed by reloading:
4006	     find_reloads_address_1 records substitutions for them,
4007	     which have been performed by subst_reloads above.  */
4008	  for (i = n_reloads - 1; i >= 0; i--)
4009	    {
4010	      rtx in_reg = rld[i].in_reg;
4011	      if (in_reg)
4012		{
4013		  enum rtx_code code = GET_CODE (in_reg);
4014		  /* PRE_INC / PRE_DEC will have the reload register ending up
4015		     with the same value as the stack slot, but that doesn't
4016		     hold true for POST_INC / POST_DEC.  Either we have to
4017		     convert the memory access to a true POST_INC / POST_DEC,
4018		     or we can't use the reload register for inheritance.  */
4019		  if ((code == POST_INC || code == POST_DEC)
4020		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4021					    REGNO (rld[i].reg_rtx))
4022		      /* Make sure it is the inc/dec pseudo, and not
4023			 some other (e.g. output operand) pseudo.  */
4024		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4025			  == REGNO (XEXP (in_reg, 0))))
4026
4027		    {
4028		      rtx reload_reg = rld[i].reg_rtx;
4029		      enum machine_mode mode = GET_MODE (reload_reg);
4030		      int n = 0;
4031		      rtx p;
4032
4033		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4034			{
4035			  /* We really want to ignore REG_INC notes here, so
4036			     use PATTERN (p) as argument to reg_set_p .  */
4037			  if (reg_set_p (reload_reg, PATTERN (p)))
4038			    break;
4039			  n = count_occurrences (PATTERN (p), reload_reg, 0);
4040			  if (! n)
4041			    continue;
4042			  if (n == 1)
4043			    {
4044			      n = validate_replace_rtx (reload_reg,
4045							gen_rtx_fmt_e (code,
4046								       mode,
4047								       reload_reg),
4048							p);
4049
4050			      /* We must also verify that the constraints
4051				 are met after the replacement.  */
4052			      extract_insn (p);
4053			      if (n)
4054				n = constrain_operands (1);
4055			      else
4056				break;
4057
4058			      /* If the constraints were not met, then
4059				 undo the replacement.  */
4060			      if (!n)
4061				{
4062				  validate_replace_rtx (gen_rtx_fmt_e (code,
4063								       mode,
4064								       reload_reg),
4065							reload_reg, p);
4066				  break;
4067				}
4068
4069			    }
4070			  break;
4071			}
4072		      if (n == 1)
4073			{
4074			  REG_NOTES (p)
4075			    = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4076						 REG_NOTES (p));
4077			  /* Mark this as having an output reload so that the
4078			     REG_INC processing code below won't invalidate
4079			     the reload for inheritance.  */
4080			  SET_HARD_REG_BIT (reg_is_output_reload,
4081					    REGNO (reload_reg));
4082			  reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4083			}
4084		      else
4085			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4086					      NULL);
4087		    }
4088		  else if ((code == PRE_INC || code == PRE_DEC)
4089			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
4090						 REGNO (rld[i].reg_rtx))
4091			   /* Make sure it is the inc/dec pseudo, and not
4092			      some other (e.g. output operand) pseudo.  */
4093			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4094			       == REGNO (XEXP (in_reg, 0))))
4095		    {
4096		      SET_HARD_REG_BIT (reg_is_output_reload,
4097					REGNO (rld[i].reg_rtx));
4098		      reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4099		    }
4100		}
4101	    }
4102	  /* If a pseudo that got a hard register is auto-incremented,
4103	     we must purge records of copying it into pseudos without
4104	     hard registers.  */
4105	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4106	    if (REG_NOTE_KIND (x) == REG_INC)
4107	      {
4108		/* See if this pseudo reg was reloaded in this insn.
4109		   If so, its last-reload info is still valid
4110		   because it is based on this insn's reload.  */
4111		for (i = 0; i < n_reloads; i++)
4112		  if (rld[i].out == XEXP (x, 0))
4113		    break;
4114
4115		if (i == n_reloads)
4116		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4117	      }
4118#endif
4119	}
4120      /* A reload reg's contents are unknown after a label.  */
4121      if (LABEL_P (insn))
4122	CLEAR_HARD_REG_SET (reg_reloaded_valid);
4123
4124      /* Don't assume a reload reg is still good after a call insn
4125	 if it is a call-used reg, or if it contains a value that will
4126         be partially clobbered by the call.  */
4127      else if (CALL_P (insn))
4128	{
4129	AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4130	AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4131	}
4132    }
4133
4134  /* Clean up.  */
4135  free (reg_last_reload_reg);
4136  free (reg_has_output_reload);
4137}
4138
4139/* Discard all record of any value reloaded from X,
4140   or reloaded in X from someplace else;
4141   unless X is an output reload reg of the current insn.
4142
4143   X may be a hard reg (the reload reg)
4144   or it may be a pseudo reg that was reloaded from.  */
4145
4146static void
4147forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
4148		      void *data ATTRIBUTE_UNUSED)
4149{
4150  unsigned int regno;
4151  unsigned int nr;
4152
4153  /* note_stores does give us subregs of hard regs,
4154     subreg_regno_offset requires a hard reg.  */
4155  while (GET_CODE (x) == SUBREG)
4156    {
4157      /* We ignore the subreg offset when calculating the regno,
4158	 because we are using the entire underlying hard register
4159	 below.  */
4160      x = SUBREG_REG (x);
4161    }
4162
4163  if (!REG_P (x))
4164    return;
4165
4166  regno = REGNO (x);
4167
4168  if (regno >= FIRST_PSEUDO_REGISTER)
4169    nr = 1;
4170  else
4171    {
4172      unsigned int i;
4173
4174      nr = hard_regno_nregs[regno][GET_MODE (x)];
4175      /* Storing into a spilled-reg invalidates its contents.
4176	 This can happen if a block-local pseudo is allocated to that reg
4177	 and it wasn't spilled because this block's total need is 0.
4178	 Then some insn might have an optional reload and use this reg.  */
4179      for (i = 0; i < nr; i++)
4180	/* But don't do this if the reg actually serves as an output
4181	   reload reg in the current instruction.  */
4182	if (n_reloads == 0
4183	    || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4184	  {
4185	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4186	    CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
4187	    spill_reg_store[regno + i] = 0;
4188	  }
4189    }
4190
4191  /* Since value of X has changed,
4192     forget any value previously copied from it.  */
4193
4194  while (nr-- > 0)
4195    /* But don't forget a copy if this is the output reload
4196       that establishes the copy's validity.  */
4197    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4198      reg_last_reload_reg[regno + nr] = 0;
4199}
4200
4201/* The following HARD_REG_SETs indicate when each hard register is
4202   used for a reload of various parts of the current insn.  */
4203
4204/* If reg is unavailable for all reloads.  */
4205static HARD_REG_SET reload_reg_unavailable;
4206/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4207static HARD_REG_SET reload_reg_used;
4208/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4209static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4210/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4211static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4212/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4213static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4214/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
4215static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4216/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
4217static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4218/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
4219static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4220/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
4221static HARD_REG_SET reload_reg_used_in_op_addr;
4222/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
4223static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4224/* If reg is in use for a RELOAD_FOR_INSN reload.  */
4225static HARD_REG_SET reload_reg_used_in_insn;
4226/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
4227static HARD_REG_SET reload_reg_used_in_other_addr;
4228
4229/* If reg is in use as a reload reg for any sort of reload.  */
4230static HARD_REG_SET reload_reg_used_at_all;
4231
4232/* If reg is use as an inherited reload.  We just mark the first register
4233   in the group.  */
4234static HARD_REG_SET reload_reg_used_for_inherit;
4235
4236/* Records which hard regs are used in any way, either as explicit use or
4237   by being allocated to a pseudo during any point of the current insn.  */
4238static HARD_REG_SET reg_used_in_insn;
4239
4240/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4241   TYPE. MODE is used to indicate how many consecutive regs are
4242   actually used.  */
4243
4244static void
4245mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4246			enum machine_mode mode)
4247{
4248  unsigned int nregs = hard_regno_nregs[regno][mode];
4249  unsigned int i;
4250
4251  for (i = regno; i < nregs + regno; i++)
4252    {
4253      switch (type)
4254	{
4255	case RELOAD_OTHER:
4256	  SET_HARD_REG_BIT (reload_reg_used, i);
4257	  break;
4258
4259	case RELOAD_FOR_INPUT_ADDRESS:
4260	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4261	  break;
4262
4263	case RELOAD_FOR_INPADDR_ADDRESS:
4264	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4265	  break;
4266
4267	case RELOAD_FOR_OUTPUT_ADDRESS:
4268	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4269	  break;
4270
4271	case RELOAD_FOR_OUTADDR_ADDRESS:
4272	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4273	  break;
4274
4275	case RELOAD_FOR_OPERAND_ADDRESS:
4276	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4277	  break;
4278
4279	case RELOAD_FOR_OPADDR_ADDR:
4280	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4281	  break;
4282
4283	case RELOAD_FOR_OTHER_ADDRESS:
4284	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4285	  break;
4286
4287	case RELOAD_FOR_INPUT:
4288	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4289	  break;
4290
4291	case RELOAD_FOR_OUTPUT:
4292	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4293	  break;
4294
4295	case RELOAD_FOR_INSN:
4296	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4297	  break;
4298	}
4299
4300      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4301    }
4302}
4303
4304/* Similarly, but show REGNO is no longer in use for a reload.  */
4305
4306static void
4307clear_reload_reg_in_use (unsigned int regno, int opnum,
4308			 enum reload_type type, enum machine_mode mode)
4309{
4310  unsigned int nregs = hard_regno_nregs[regno][mode];
4311  unsigned int start_regno, end_regno, r;
4312  int i;
4313  /* A complication is that for some reload types, inheritance might
4314     allow multiple reloads of the same types to share a reload register.
4315     We set check_opnum if we have to check only reloads with the same
4316     operand number, and check_any if we have to check all reloads.  */
4317  int check_opnum = 0;
4318  int check_any = 0;
4319  HARD_REG_SET *used_in_set;
4320
4321  switch (type)
4322    {
4323    case RELOAD_OTHER:
4324      used_in_set = &reload_reg_used;
4325      break;
4326
4327    case RELOAD_FOR_INPUT_ADDRESS:
4328      used_in_set = &reload_reg_used_in_input_addr[opnum];
4329      break;
4330
4331    case RELOAD_FOR_INPADDR_ADDRESS:
4332      check_opnum = 1;
4333      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4334      break;
4335
4336    case RELOAD_FOR_OUTPUT_ADDRESS:
4337      used_in_set = &reload_reg_used_in_output_addr[opnum];
4338      break;
4339
4340    case RELOAD_FOR_OUTADDR_ADDRESS:
4341      check_opnum = 1;
4342      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4343      break;
4344
4345    case RELOAD_FOR_OPERAND_ADDRESS:
4346      used_in_set = &reload_reg_used_in_op_addr;
4347      break;
4348
4349    case RELOAD_FOR_OPADDR_ADDR:
4350      check_any = 1;
4351      used_in_set = &reload_reg_used_in_op_addr_reload;
4352      break;
4353
4354    case RELOAD_FOR_OTHER_ADDRESS:
4355      used_in_set = &reload_reg_used_in_other_addr;
4356      check_any = 1;
4357      break;
4358
4359    case RELOAD_FOR_INPUT:
4360      used_in_set = &reload_reg_used_in_input[opnum];
4361      break;
4362
4363    case RELOAD_FOR_OUTPUT:
4364      used_in_set = &reload_reg_used_in_output[opnum];
4365      break;
4366
4367    case RELOAD_FOR_INSN:
4368      used_in_set = &reload_reg_used_in_insn;
4369      break;
4370    default:
4371      gcc_unreachable ();
4372    }
4373  /* We resolve conflicts with remaining reloads of the same type by
4374     excluding the intervals of reload registers by them from the
4375     interval of freed reload registers.  Since we only keep track of
4376     one set of interval bounds, we might have to exclude somewhat
4377     more than what would be necessary if we used a HARD_REG_SET here.
4378     But this should only happen very infrequently, so there should
4379     be no reason to worry about it.  */
4380
4381  start_regno = regno;
4382  end_regno = regno + nregs;
4383  if (check_opnum || check_any)
4384    {
4385      for (i = n_reloads - 1; i >= 0; i--)
4386	{
4387	  if (rld[i].when_needed == type
4388	      && (check_any || rld[i].opnum == opnum)
4389	      && rld[i].reg_rtx)
4390	    {
4391	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4392	      unsigned int conflict_end
4393		= (conflict_start
4394		   + hard_regno_nregs[conflict_start][rld[i].mode]);
4395
4396	      /* If there is an overlap with the first to-be-freed register,
4397		 adjust the interval start.  */
4398	      if (conflict_start <= start_regno && conflict_end > start_regno)
4399		start_regno = conflict_end;
4400	      /* Otherwise, if there is a conflict with one of the other
4401		 to-be-freed registers, adjust the interval end.  */
4402	      if (conflict_start > start_regno && conflict_start < end_regno)
4403		end_regno = conflict_start;
4404	    }
4405	}
4406    }
4407
4408  for (r = start_regno; r < end_regno; r++)
4409    CLEAR_HARD_REG_BIT (*used_in_set, r);
4410}
4411
4412/* 1 if reg REGNO is free as a reload reg for a reload of the sort
4413   specified by OPNUM and TYPE.  */
4414
4415static int
4416reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4417{
4418  int i;
4419
4420  /* In use for a RELOAD_OTHER means it's not available for anything.  */
4421  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4422      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4423    return 0;
4424
4425  switch (type)
4426    {
4427    case RELOAD_OTHER:
4428      /* In use for anything means we can't use it for RELOAD_OTHER.  */
4429      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4430	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4431	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4432	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4433	return 0;
4434
4435      for (i = 0; i < reload_n_operands; i++)
4436	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4437	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4438	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4439	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4440	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4441	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4442	  return 0;
4443
4444      return 1;
4445
4446    case RELOAD_FOR_INPUT:
4447      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4448	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4449	return 0;
4450
4451      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4452	return 0;
4453
4454      /* If it is used for some other input, can't use it.  */
4455      for (i = 0; i < reload_n_operands; i++)
4456	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4457	  return 0;
4458
4459      /* If it is used in a later operand's address, can't use it.  */
4460      for (i = opnum + 1; i < reload_n_operands; i++)
4461	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4462	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4463	  return 0;
4464
4465      return 1;
4466
4467    case RELOAD_FOR_INPUT_ADDRESS:
4468      /* Can't use a register if it is used for an input address for this
4469	 operand or used as an input in an earlier one.  */
4470      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4471	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4472	return 0;
4473
4474      for (i = 0; i < opnum; i++)
4475	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4476	  return 0;
4477
4478      return 1;
4479
4480    case RELOAD_FOR_INPADDR_ADDRESS:
4481      /* Can't use a register if it is used for an input address
4482	 for this operand or used as an input in an earlier
4483	 one.  */
4484      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4485	return 0;
4486
4487      for (i = 0; i < opnum; i++)
4488	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4489	  return 0;
4490
4491      return 1;
4492
4493    case RELOAD_FOR_OUTPUT_ADDRESS:
4494      /* Can't use a register if it is used for an output address for this
4495	 operand or used as an output in this or a later operand.  Note
4496	 that multiple output operands are emitted in reverse order, so
4497	 the conflicting ones are those with lower indices.  */
4498      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4499	return 0;
4500
4501      for (i = 0; i <= opnum; i++)
4502	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4503	  return 0;
4504
4505      return 1;
4506
4507    case RELOAD_FOR_OUTADDR_ADDRESS:
4508      /* Can't use a register if it is used for an output address
4509	 for this operand or used as an output in this or a
4510	 later operand.  Note that multiple output operands are
4511	 emitted in reverse order, so the conflicting ones are
4512	 those with lower indices.  */
4513      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4514	return 0;
4515
4516      for (i = 0; i <= opnum; i++)
4517	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4518	  return 0;
4519
4520      return 1;
4521
4522    case RELOAD_FOR_OPERAND_ADDRESS:
4523      for (i = 0; i < reload_n_operands; i++)
4524	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4525	  return 0;
4526
4527      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4528	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4529
4530    case RELOAD_FOR_OPADDR_ADDR:
4531      for (i = 0; i < reload_n_operands; i++)
4532	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4533	  return 0;
4534
4535      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4536
4537    case RELOAD_FOR_OUTPUT:
4538      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4539	 outputs, or an operand address for this or an earlier output.
4540	 Note that multiple output operands are emitted in reverse order,
4541	 so the conflicting ones are those with higher indices.  */
4542      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4543	return 0;
4544
4545      for (i = 0; i < reload_n_operands; i++)
4546	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4547	  return 0;
4548
4549      for (i = opnum; i < reload_n_operands; i++)
4550	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4551	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4552	  return 0;
4553
4554      return 1;
4555
4556    case RELOAD_FOR_INSN:
4557      for (i = 0; i < reload_n_operands; i++)
4558	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4559	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4560	  return 0;
4561
4562      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4563	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4564
4565    case RELOAD_FOR_OTHER_ADDRESS:
4566      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4567
4568    default:
4569      gcc_unreachable ();
4570    }
4571}
4572
4573/* Return 1 if the value in reload reg REGNO, as used by a reload
4574   needed for the part of the insn specified by OPNUM and TYPE,
4575   is still available in REGNO at the end of the insn.
4576
4577   We can assume that the reload reg was already tested for availability
4578   at the time it is needed, and we should not check this again,
4579   in case the reg has already been marked in use.  */
4580
4581static int
4582reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4583{
4584  int i;
4585
4586  switch (type)
4587    {
4588    case RELOAD_OTHER:
4589      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4590	 its value must reach the end.  */
4591      return 1;
4592
4593      /* If this use is for part of the insn,
4594	 its value reaches if no subsequent part uses the same register.
4595	 Just like the above function, don't try to do this with lots
4596	 of fallthroughs.  */
4597
4598    case RELOAD_FOR_OTHER_ADDRESS:
4599      /* Here we check for everything else, since these don't conflict
4600	 with anything else and everything comes later.  */
4601
4602      for (i = 0; i < reload_n_operands; i++)
4603	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4604	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4605	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4606	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4607	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4608	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4609	  return 0;
4610
4611      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4612	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4613	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4614	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4615
4616    case RELOAD_FOR_INPUT_ADDRESS:
4617    case RELOAD_FOR_INPADDR_ADDRESS:
4618      /* Similar, except that we check only for this and subsequent inputs
4619	 and the address of only subsequent inputs and we do not need
4620	 to check for RELOAD_OTHER objects since they are known not to
4621	 conflict.  */
4622
4623      for (i = opnum; i < reload_n_operands; i++)
4624	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4625	  return 0;
4626
4627      for (i = opnum + 1; i < reload_n_operands; i++)
4628	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4629	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4630	  return 0;
4631
4632      for (i = 0; i < reload_n_operands; i++)
4633	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4634	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4635	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4636	  return 0;
4637
4638      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4639	return 0;
4640
4641      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4642	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4643	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4644
4645    case RELOAD_FOR_INPUT:
4646      /* Similar to input address, except we start at the next operand for
4647	 both input and input address and we do not check for
4648	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4649	 would conflict.  */
4650
4651      for (i = opnum + 1; i < reload_n_operands; i++)
4652	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4653	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4654	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4655	  return 0;
4656
4657      /* ... fall through ...  */
4658
4659    case RELOAD_FOR_OPERAND_ADDRESS:
4660      /* Check outputs and their addresses.  */
4661
4662      for (i = 0; i < reload_n_operands; i++)
4663	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4664	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4665	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4666	  return 0;
4667
4668      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4669
4670    case RELOAD_FOR_OPADDR_ADDR:
4671      for (i = 0; i < reload_n_operands; i++)
4672	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4673	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4674	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4675	  return 0;
4676
4677      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4678	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4679	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4680
4681    case RELOAD_FOR_INSN:
4682      /* These conflict with other outputs with RELOAD_OTHER.  So
4683	 we need only check for output addresses.  */
4684
4685      opnum = reload_n_operands;
4686
4687      /* ... fall through ...  */
4688
4689    case RELOAD_FOR_OUTPUT:
4690    case RELOAD_FOR_OUTPUT_ADDRESS:
4691    case RELOAD_FOR_OUTADDR_ADDRESS:
4692      /* We already know these can't conflict with a later output.  So the
4693	 only thing to check are later output addresses.
4694	 Note that multiple output operands are emitted in reverse order,
4695	 so the conflicting ones are those with lower indices.  */
4696      for (i = 0; i < opnum; i++)
4697	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4698	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4699	  return 0;
4700
4701      return 1;
4702
4703    default:
4704      gcc_unreachable ();
4705    }
4706}
4707
4708/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4709   Return 0 otherwise.
4710
4711   This function uses the same algorithm as reload_reg_free_p above.  */
4712
4713static int
4714reloads_conflict (int r1, int r2)
4715{
4716  enum reload_type r1_type = rld[r1].when_needed;
4717  enum reload_type r2_type = rld[r2].when_needed;
4718  int r1_opnum = rld[r1].opnum;
4719  int r2_opnum = rld[r2].opnum;
4720
4721  /* RELOAD_OTHER conflicts with everything.  */
4722  if (r2_type == RELOAD_OTHER)
4723    return 1;
4724
4725  /* Otherwise, check conflicts differently for each type.  */
4726
4727  switch (r1_type)
4728    {
4729    case RELOAD_FOR_INPUT:
4730      return (r2_type == RELOAD_FOR_INSN
4731	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4732	      || r2_type == RELOAD_FOR_OPADDR_ADDR
4733	      || r2_type == RELOAD_FOR_INPUT
4734	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4735		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4736		  && r2_opnum > r1_opnum));
4737
4738    case RELOAD_FOR_INPUT_ADDRESS:
4739      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4740	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4741
4742    case RELOAD_FOR_INPADDR_ADDRESS:
4743      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4744	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4745
4746    case RELOAD_FOR_OUTPUT_ADDRESS:
4747      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4748	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4749
4750    case RELOAD_FOR_OUTADDR_ADDRESS:
4751      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4752	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4753
4754    case RELOAD_FOR_OPERAND_ADDRESS:
4755      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4756	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4757
4758    case RELOAD_FOR_OPADDR_ADDR:
4759      return (r2_type == RELOAD_FOR_INPUT
4760	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
4761
4762    case RELOAD_FOR_OUTPUT:
4763      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4764	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4765		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4766		  && r2_opnum >= r1_opnum));
4767
4768    case RELOAD_FOR_INSN:
4769      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4770	      || r2_type == RELOAD_FOR_INSN
4771	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4772
4773    case RELOAD_FOR_OTHER_ADDRESS:
4774      return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4775
4776    case RELOAD_OTHER:
4777      return 1;
4778
4779    default:
4780      gcc_unreachable ();
4781    }
4782}
4783
4784/* Indexed by reload number, 1 if incoming value
4785   inherited from previous insns.  */
4786static char reload_inherited[MAX_RELOADS];
4787
4788/* For an inherited reload, this is the insn the reload was inherited from,
4789   if we know it.  Otherwise, this is 0.  */
4790static rtx reload_inheritance_insn[MAX_RELOADS];
4791
4792/* If nonzero, this is a place to get the value of the reload,
4793   rather than using reload_in.  */
4794static rtx reload_override_in[MAX_RELOADS];
4795
4796/* For each reload, the hard register number of the register used,
4797   or -1 if we did not need a register for this reload.  */
4798static int reload_spill_index[MAX_RELOADS];
4799
4800/* Subroutine of free_for_value_p, used to check a single register.
4801   START_REGNO is the starting regno of the full reload register
4802   (possibly comprising multiple hard registers) that we are considering.  */
4803
4804static int
4805reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
4806			     enum reload_type type, rtx value, rtx out,
4807			     int reloadnum, int ignore_address_reloads)
4808{
4809  int time1;
4810  /* Set if we see an input reload that must not share its reload register
4811     with any new earlyclobber, but might otherwise share the reload
4812     register with an output or input-output reload.  */
4813  int check_earlyclobber = 0;
4814  int i;
4815  int copy = 0;
4816
4817  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4818    return 0;
4819
4820  if (out == const0_rtx)
4821    {
4822      copy = 1;
4823      out = NULL_RTX;
4824    }
4825
4826  /* We use some pseudo 'time' value to check if the lifetimes of the
4827     new register use would overlap with the one of a previous reload
4828     that is not read-only or uses a different value.
4829     The 'time' used doesn't have to be linear in any shape or form, just
4830     monotonic.
4831     Some reload types use different 'buckets' for each operand.
4832     So there are MAX_RECOG_OPERANDS different time values for each
4833     such reload type.
4834     We compute TIME1 as the time when the register for the prospective
4835     new reload ceases to be live, and TIME2 for each existing
4836     reload as the time when that the reload register of that reload
4837     becomes live.
4838     Where there is little to be gained by exact lifetime calculations,
4839     we just make conservative assumptions, i.e. a longer lifetime;
4840     this is done in the 'default:' cases.  */
4841  switch (type)
4842    {
4843    case RELOAD_FOR_OTHER_ADDRESS:
4844      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
4845      time1 = copy ? 0 : 1;
4846      break;
4847    case RELOAD_OTHER:
4848      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4849      break;
4850      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4851	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
4852	 respectively, to the time values for these, we get distinct time
4853	 values.  To get distinct time values for each operand, we have to
4854	 multiply opnum by at least three.  We round that up to four because
4855	 multiply by four is often cheaper.  */
4856    case RELOAD_FOR_INPADDR_ADDRESS:
4857      time1 = opnum * 4 + 2;
4858      break;
4859    case RELOAD_FOR_INPUT_ADDRESS:
4860      time1 = opnum * 4 + 3;
4861      break;
4862    case RELOAD_FOR_INPUT:
4863      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4864	 executes (inclusive).  */
4865      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4866      break;
4867    case RELOAD_FOR_OPADDR_ADDR:
4868      /* opnum * 4 + 4
4869	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4870      time1 = MAX_RECOG_OPERANDS * 4 + 1;
4871      break;
4872    case RELOAD_FOR_OPERAND_ADDRESS:
4873      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4874	 is executed.  */
4875      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4876      break;
4877    case RELOAD_FOR_OUTADDR_ADDRESS:
4878      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4879      break;
4880    case RELOAD_FOR_OUTPUT_ADDRESS:
4881      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4882      break;
4883    default:
4884      time1 = MAX_RECOG_OPERANDS * 5 + 5;
4885    }
4886
4887  for (i = 0; i < n_reloads; i++)
4888    {
4889      rtx reg = rld[i].reg_rtx;
4890      if (reg && REG_P (reg)
4891	  && ((unsigned) regno - true_regnum (reg)
4892	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
4893	  && i != reloadnum)
4894	{
4895	  rtx other_input = rld[i].in;
4896
4897	  /* If the other reload loads the same input value, that
4898	     will not cause a conflict only if it's loading it into
4899	     the same register.  */
4900	  if (true_regnum (reg) != start_regno)
4901	    other_input = NULL_RTX;
4902	  if (! other_input || ! rtx_equal_p (other_input, value)
4903	      || rld[i].out || out)
4904	    {
4905	      int time2;
4906	      switch (rld[i].when_needed)
4907		{
4908		case RELOAD_FOR_OTHER_ADDRESS:
4909		  time2 = 0;
4910		  break;
4911		case RELOAD_FOR_INPADDR_ADDRESS:
4912		  /* find_reloads makes sure that a
4913		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4914		     by at most one - the first -
4915		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
4916		     address reload is inherited, the address address reload
4917		     goes away, so we can ignore this conflict.  */
4918		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4919		      && ignore_address_reloads
4920		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
4921			 Then the address address is still needed to store
4922			 back the new address.  */
4923		      && ! rld[reloadnum].out)
4924		    continue;
4925		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
4926		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
4927		     reloads go away.  */
4928		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4929		      && ignore_address_reloads
4930		      /* Unless we are reloading an auto_inc expression.  */
4931		      && ! rld[reloadnum].out)
4932		    continue;
4933		  time2 = rld[i].opnum * 4 + 2;
4934		  break;
4935		case RELOAD_FOR_INPUT_ADDRESS:
4936		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4937		      && ignore_address_reloads
4938		      && ! rld[reloadnum].out)
4939		    continue;
4940		  time2 = rld[i].opnum * 4 + 3;
4941		  break;
4942		case RELOAD_FOR_INPUT:
4943		  time2 = rld[i].opnum * 4 + 4;
4944		  check_earlyclobber = 1;
4945		  break;
4946		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
4947		     == MAX_RECOG_OPERAND * 4  */
4948		case RELOAD_FOR_OPADDR_ADDR:
4949		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
4950		      && ignore_address_reloads
4951		      && ! rld[reloadnum].out)
4952		    continue;
4953		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
4954		  break;
4955		case RELOAD_FOR_OPERAND_ADDRESS:
4956		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
4957		  check_earlyclobber = 1;
4958		  break;
4959		case RELOAD_FOR_INSN:
4960		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
4961		  break;
4962		case RELOAD_FOR_OUTPUT:
4963		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
4964		     instruction is executed.  */
4965		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
4966		  break;
4967		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
4968		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
4969		     value.  */
4970		case RELOAD_FOR_OUTADDR_ADDRESS:
4971		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
4972		      && ignore_address_reloads
4973		      && ! rld[reloadnum].out)
4974		    continue;
4975		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
4976		  break;
4977		case RELOAD_FOR_OUTPUT_ADDRESS:
4978		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
4979		  break;
4980		case RELOAD_OTHER:
4981		  /* If there is no conflict in the input part, handle this
4982		     like an output reload.  */
4983		  if (! rld[i].in || rtx_equal_p (other_input, value))
4984		    {
4985		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
4986		      /* Earlyclobbered outputs must conflict with inputs.  */
4987		      if (earlyclobber_operand_p (rld[i].out))
4988			time2 = MAX_RECOG_OPERANDS * 4 + 3;
4989
4990		      break;
4991		    }
4992		  time2 = 1;
4993		  /* RELOAD_OTHER might be live beyond instruction execution,
4994		     but this is not obvious when we set time2 = 1.  So check
4995		     here if there might be a problem with the new reload
4996		     clobbering the register used by the RELOAD_OTHER.  */
4997		  if (out)
4998		    return 0;
4999		  break;
5000		default:
5001		  return 0;
5002		}
5003	      if ((time1 >= time2
5004		   && (! rld[i].in || rld[i].out
5005		       || ! rtx_equal_p (other_input, value)))
5006		  || (out && rld[reloadnum].out_reg
5007		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5008		return 0;
5009	    }
5010	}
5011    }
5012
5013  /* Earlyclobbered outputs must conflict with inputs.  */
5014  if (check_earlyclobber && out && earlyclobber_operand_p (out))
5015    return 0;
5016
5017  return 1;
5018}
5019
5020/* Return 1 if the value in reload reg REGNO, as used by a reload
5021   needed for the part of the insn specified by OPNUM and TYPE,
5022   may be used to load VALUE into it.
5023
5024   MODE is the mode in which the register is used, this is needed to
5025   determine how many hard regs to test.
5026
5027   Other read-only reloads with the same value do not conflict
5028   unless OUT is nonzero and these other reloads have to live while
5029   output reloads live.
5030   If OUT is CONST0_RTX, this is a special case: it means that the
5031   test should not be for using register REGNO as reload register, but
5032   for copying from register REGNO into the reload register.
5033
5034   RELOADNUM is the number of the reload we want to load this value for;
5035   a reload does not conflict with itself.
5036
5037   When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5038   reloads that load an address for the very reload we are considering.
5039
5040   The caller has to make sure that there is no conflict with the return
5041   register.  */
5042
5043static int
5044free_for_value_p (int regno, enum machine_mode mode, int opnum,
5045		  enum reload_type type, rtx value, rtx out, int reloadnum,
5046		  int ignore_address_reloads)
5047{
5048  int nregs = hard_regno_nregs[regno][mode];
5049  while (nregs-- > 0)
5050    if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5051				       value, out, reloadnum,
5052				       ignore_address_reloads))
5053      return 0;
5054  return 1;
5055}
5056
5057/* Return nonzero if the rtx X is invariant over the current function.  */
5058/* ??? Actually, the places where we use this expect exactly what is
5059   tested here, and not everything that is function invariant.  In
5060   particular, the frame pointer and arg pointer are special cased;
5061   pic_offset_table_rtx is not, and we must not spill these things to
5062   memory.  */
5063
5064int
5065function_invariant_p (rtx x)
5066{
5067  if (CONSTANT_P (x))
5068    return 1;
5069  if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5070    return 1;
5071  if (GET_CODE (x) == PLUS
5072      && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5073      && CONSTANT_P (XEXP (x, 1)))
5074    return 1;
5075  return 0;
5076}
5077
5078/* Determine whether the reload reg X overlaps any rtx'es used for
5079   overriding inheritance.  Return nonzero if so.  */
5080
5081static int
5082conflicts_with_override (rtx x)
5083{
5084  int i;
5085  for (i = 0; i < n_reloads; i++)
5086    if (reload_override_in[i]
5087	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
5088      return 1;
5089  return 0;
5090}
5091
5092/* Give an error message saying we failed to find a reload for INSN,
5093   and clear out reload R.  */
5094static void
5095failed_reload (rtx insn, int r)
5096{
5097  if (asm_noperands (PATTERN (insn)) < 0)
5098    /* It's the compiler's fault.  */
5099    fatal_insn ("could not find a spill register", insn);
5100
5101  /* It's the user's fault; the operand's mode and constraint
5102     don't match.  Disable this reload so we don't crash in final.  */
5103  error_for_asm (insn,
5104		 "%<asm%> operand constraint incompatible with operand size");
5105  rld[r].in = 0;
5106  rld[r].out = 0;
5107  rld[r].reg_rtx = 0;
5108  rld[r].optional = 1;
5109  rld[r].secondary_p = 1;
5110}
5111
5112/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5113   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
5114   successful.  */
5115static int
5116set_reload_reg (int i, int r)
5117{
5118  int regno;
5119  rtx reg = spill_reg_rtx[i];
5120
5121  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5122    spill_reg_rtx[i] = reg
5123      = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5124
5125  regno = true_regnum (reg);
5126
5127  /* Detect when the reload reg can't hold the reload mode.
5128     This used to be one `if', but Sequent compiler can't handle that.  */
5129  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5130    {
5131      enum machine_mode test_mode = VOIDmode;
5132      if (rld[r].in)
5133	test_mode = GET_MODE (rld[r].in);
5134      /* If rld[r].in has VOIDmode, it means we will load it
5135	 in whatever mode the reload reg has: to wit, rld[r].mode.
5136	 We have already tested that for validity.  */
5137      /* Aside from that, we need to test that the expressions
5138	 to reload from or into have modes which are valid for this
5139	 reload register.  Otherwise the reload insns would be invalid.  */
5140      if (! (rld[r].in != 0 && test_mode != VOIDmode
5141	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5142	if (! (rld[r].out != 0
5143	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5144	  {
5145	    /* The reg is OK.  */
5146	    last_spill_reg = i;
5147
5148	    /* Mark as in use for this insn the reload regs we use
5149	       for this.  */
5150	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5151				    rld[r].when_needed, rld[r].mode);
5152
5153	    rld[r].reg_rtx = reg;
5154	    reload_spill_index[r] = spill_regs[i];
5155	    return 1;
5156	  }
5157    }
5158  return 0;
5159}
5160
5161/* Find a spill register to use as a reload register for reload R.
5162   LAST_RELOAD is nonzero if this is the last reload for the insn being
5163   processed.
5164
5165   Set rld[R].reg_rtx to the register allocated.
5166
5167   We return 1 if successful, or 0 if we couldn't find a spill reg and
5168   we didn't change anything.  */
5169
5170static int
5171allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5172		     int last_reload)
5173{
5174  int i, pass, count;
5175
5176  /* If we put this reload ahead, thinking it is a group,
5177     then insist on finding a group.  Otherwise we can grab a
5178     reg that some other reload needs.
5179     (That can happen when we have a 68000 DATA_OR_FP_REG
5180     which is a group of data regs or one fp reg.)
5181     We need not be so restrictive if there are no more reloads
5182     for this insn.
5183
5184     ??? Really it would be nicer to have smarter handling
5185     for that kind of reg class, where a problem like this is normal.
5186     Perhaps those classes should be avoided for reloading
5187     by use of more alternatives.  */
5188
5189  int force_group = rld[r].nregs > 1 && ! last_reload;
5190
5191  /* If we want a single register and haven't yet found one,
5192     take any reg in the right class and not in use.
5193     If we want a consecutive group, here is where we look for it.
5194
5195     We use two passes so we can first look for reload regs to
5196     reuse, which are already in use for other reloads in this insn,
5197     and only then use additional registers.
5198     I think that maximizing reuse is needed to make sure we don't
5199     run out of reload regs.  Suppose we have three reloads, and
5200     reloads A and B can share regs.  These need two regs.
5201     Suppose A and B are given different regs.
5202     That leaves none for C.  */
5203  for (pass = 0; pass < 2; pass++)
5204    {
5205      /* I is the index in spill_regs.
5206	 We advance it round-robin between insns to use all spill regs
5207	 equally, so that inherited reloads have a chance
5208	 of leapfrogging each other.  */
5209
5210      i = last_spill_reg;
5211
5212      for (count = 0; count < n_spills; count++)
5213	{
5214	  int class = (int) rld[r].class;
5215	  int regnum;
5216
5217	  i++;
5218	  if (i >= n_spills)
5219	    i -= n_spills;
5220	  regnum = spill_regs[i];
5221
5222	  if ((reload_reg_free_p (regnum, rld[r].opnum,
5223				  rld[r].when_needed)
5224	       || (rld[r].in
5225		   /* We check reload_reg_used to make sure we
5226		      don't clobber the return register.  */
5227		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5228		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5229					rld[r].when_needed, rld[r].in,
5230					rld[r].out, r, 1)))
5231	      && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5232	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5233	      /* Look first for regs to share, then for unshared.  But
5234		 don't share regs used for inherited reloads; they are
5235		 the ones we want to preserve.  */
5236	      && (pass
5237		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5238					 regnum)
5239		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5240					      regnum))))
5241	    {
5242	      int nr = hard_regno_nregs[regnum][rld[r].mode];
5243	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5244		 (on 68000) got us two FP regs.  If NR is 1,
5245		 we would reject both of them.  */
5246	      if (force_group)
5247		nr = rld[r].nregs;
5248	      /* If we need only one reg, we have already won.  */
5249	      if (nr == 1)
5250		{
5251		  /* But reject a single reg if we demand a group.  */
5252		  if (force_group)
5253		    continue;
5254		  break;
5255		}
5256	      /* Otherwise check that as many consecutive regs as we need
5257		 are available here.  */
5258	      while (nr > 1)
5259		{
5260		  int regno = regnum + nr - 1;
5261		  if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5262			&& spill_reg_order[regno] >= 0
5263			&& reload_reg_free_p (regno, rld[r].opnum,
5264					      rld[r].when_needed)))
5265		    break;
5266		  nr--;
5267		}
5268	      if (nr == 1)
5269		break;
5270	    }
5271	}
5272
5273      /* If we found something on pass 1, omit pass 2.  */
5274      if (count < n_spills)
5275	break;
5276    }
5277
5278  /* We should have found a spill register by now.  */
5279  if (count >= n_spills)
5280    return 0;
5281
5282  /* I is the index in SPILL_REG_RTX of the reload register we are to
5283     allocate.  Get an rtx for it and find its register number.  */
5284
5285  return set_reload_reg (i, r);
5286}
5287
5288/* Initialize all the tables needed to allocate reload registers.
5289   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5290   is the array we use to restore the reg_rtx field for every reload.  */
5291
5292static void
5293choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5294{
5295  int i;
5296
5297  for (i = 0; i < n_reloads; i++)
5298    rld[i].reg_rtx = save_reload_reg_rtx[i];
5299
5300  memset (reload_inherited, 0, MAX_RELOADS);
5301  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5302  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5303
5304  CLEAR_HARD_REG_SET (reload_reg_used);
5305  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5306  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5307  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5308  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5309  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5310
5311  CLEAR_HARD_REG_SET (reg_used_in_insn);
5312  {
5313    HARD_REG_SET tmp;
5314    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5315    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5316    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5317    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5318    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5319    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5320  }
5321
5322  for (i = 0; i < reload_n_operands; i++)
5323    {
5324      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5325      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5326      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5327      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5328      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5329      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5330    }
5331
5332  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5333
5334  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5335
5336  for (i = 0; i < n_reloads; i++)
5337    /* If we have already decided to use a certain register,
5338       don't use it in another way.  */
5339    if (rld[i].reg_rtx)
5340      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5341			      rld[i].when_needed, rld[i].mode);
5342}
5343
5344/* Assign hard reg targets for the pseudo-registers we must reload
5345   into hard regs for this insn.
5346   Also output the instructions to copy them in and out of the hard regs.
5347
5348   For machines with register classes, we are responsible for
5349   finding a reload reg in the proper class.  */
5350
5351static void
5352choose_reload_regs (struct insn_chain *chain)
5353{
5354  rtx insn = chain->insn;
5355  int i, j;
5356  unsigned int max_group_size = 1;
5357  enum reg_class group_class = NO_REGS;
5358  int pass, win, inheritance;
5359
5360  rtx save_reload_reg_rtx[MAX_RELOADS];
5361
5362  /* In order to be certain of getting the registers we need,
5363     we must sort the reloads into order of increasing register class.
5364     Then our grabbing of reload registers will parallel the process
5365     that provided the reload registers.
5366
5367     Also note whether any of the reloads wants a consecutive group of regs.
5368     If so, record the maximum size of the group desired and what
5369     register class contains all the groups needed by this insn.  */
5370
5371  for (j = 0; j < n_reloads; j++)
5372    {
5373      reload_order[j] = j;
5374      reload_spill_index[j] = -1;
5375
5376      if (rld[j].nregs > 1)
5377	{
5378	  max_group_size = MAX (rld[j].nregs, max_group_size);
5379	  group_class
5380	    = reg_class_superunion[(int) rld[j].class][(int) group_class];
5381	}
5382
5383      save_reload_reg_rtx[j] = rld[j].reg_rtx;
5384    }
5385
5386  if (n_reloads > 1)
5387    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5388
5389  /* If -O, try first with inheritance, then turning it off.
5390     If not -O, don't do inheritance.
5391     Using inheritance when not optimizing leads to paradoxes
5392     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5393     because one side of the comparison might be inherited.  */
5394  win = 0;
5395  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5396    {
5397      choose_reload_regs_init (chain, save_reload_reg_rtx);
5398
5399      /* Process the reloads in order of preference just found.
5400	 Beyond this point, subregs can be found in reload_reg_rtx.
5401
5402	 This used to look for an existing reloaded home for all of the
5403	 reloads, and only then perform any new reloads.  But that could lose
5404	 if the reloads were done out of reg-class order because a later
5405	 reload with a looser constraint might have an old home in a register
5406	 needed by an earlier reload with a tighter constraint.
5407
5408	 To solve this, we make two passes over the reloads, in the order
5409	 described above.  In the first pass we try to inherit a reload
5410	 from a previous insn.  If there is a later reload that needs a
5411	 class that is a proper subset of the class being processed, we must
5412	 also allocate a spill register during the first pass.
5413
5414	 Then make a second pass over the reloads to allocate any reloads
5415	 that haven't been given registers yet.  */
5416
5417      for (j = 0; j < n_reloads; j++)
5418	{
5419	  int r = reload_order[j];
5420	  rtx search_equiv = NULL_RTX;
5421
5422	  /* Ignore reloads that got marked inoperative.  */
5423	  if (rld[r].out == 0 && rld[r].in == 0
5424	      && ! rld[r].secondary_p)
5425	    continue;
5426
5427	  /* If find_reloads chose to use reload_in or reload_out as a reload
5428	     register, we don't need to chose one.  Otherwise, try even if it
5429	     found one since we might save an insn if we find the value lying
5430	     around.
5431	     Try also when reload_in is a pseudo without a hard reg.  */
5432	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
5433	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5434		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5435		      && !MEM_P (rld[r].in)
5436		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5437	    continue;
5438
5439#if 0 /* No longer needed for correct operation.
5440	 It might give better code, or might not; worth an experiment?  */
5441	  /* If this is an optional reload, we can't inherit from earlier insns
5442	     until we are sure that any non-optional reloads have been allocated.
5443	     The following code takes advantage of the fact that optional reloads
5444	     are at the end of reload_order.  */
5445	  if (rld[r].optional != 0)
5446	    for (i = 0; i < j; i++)
5447	      if ((rld[reload_order[i]].out != 0
5448		   || rld[reload_order[i]].in != 0
5449		   || rld[reload_order[i]].secondary_p)
5450		  && ! rld[reload_order[i]].optional
5451		  && rld[reload_order[i]].reg_rtx == 0)
5452		allocate_reload_reg (chain, reload_order[i], 0);
5453#endif
5454
5455	  /* First see if this pseudo is already available as reloaded
5456	     for a previous insn.  We cannot try to inherit for reloads
5457	     that are smaller than the maximum number of registers needed
5458	     for groups unless the register we would allocate cannot be used
5459	     for the groups.
5460
5461	     We could check here to see if this is a secondary reload for
5462	     an object that is already in a register of the desired class.
5463	     This would avoid the need for the secondary reload register.
5464	     But this is complex because we can't easily determine what
5465	     objects might want to be loaded via this reload.  So let a
5466	     register be allocated here.  In `emit_reload_insns' we suppress
5467	     one of the loads in the case described above.  */
5468
5469	  if (inheritance)
5470	    {
5471	      int byte = 0;
5472	      int regno = -1;
5473	      enum machine_mode mode = VOIDmode;
5474
5475	      if (rld[r].in == 0)
5476		;
5477	      else if (REG_P (rld[r].in))
5478		{
5479		  regno = REGNO (rld[r].in);
5480		  mode = GET_MODE (rld[r].in);
5481		}
5482	      else if (REG_P (rld[r].in_reg))
5483		{
5484		  regno = REGNO (rld[r].in_reg);
5485		  mode = GET_MODE (rld[r].in_reg);
5486		}
5487	      else if (GET_CODE (rld[r].in_reg) == SUBREG
5488		       && REG_P (SUBREG_REG (rld[r].in_reg)))
5489		{
5490		  byte = SUBREG_BYTE (rld[r].in_reg);
5491		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
5492		  if (regno < FIRST_PSEUDO_REGISTER)
5493		    regno = subreg_regno (rld[r].in_reg);
5494		  mode = GET_MODE (rld[r].in_reg);
5495		}
5496#ifdef AUTO_INC_DEC
5497	      else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5498			|| GET_CODE (rld[r].in_reg) == PRE_DEC
5499			|| GET_CODE (rld[r].in_reg) == POST_INC
5500			|| GET_CODE (rld[r].in_reg) == POST_DEC)
5501		       && REG_P (XEXP (rld[r].in_reg, 0)))
5502		{
5503		  regno = REGNO (XEXP (rld[r].in_reg, 0));
5504		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5505		  rld[r].out = rld[r].in;
5506		}
5507#endif
5508#if 0
5509	      /* This won't work, since REGNO can be a pseudo reg number.
5510		 Also, it takes much more hair to keep track of all the things
5511		 that can invalidate an inherited reload of part of a pseudoreg.  */
5512	      else if (GET_CODE (rld[r].in) == SUBREG
5513		       && REG_P (SUBREG_REG (rld[r].in)))
5514		regno = subreg_regno (rld[r].in);
5515#endif
5516
5517	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5518		{
5519		  enum reg_class class = rld[r].class, last_class;
5520		  rtx last_reg = reg_last_reload_reg[regno];
5521		  enum machine_mode need_mode;
5522
5523		  i = REGNO (last_reg);
5524		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5525		  last_class = REGNO_REG_CLASS (i);
5526
5527		  if (byte == 0)
5528		    need_mode = mode;
5529		  else
5530		    need_mode
5531		      = smallest_mode_for_size (GET_MODE_BITSIZE (mode)
5532						+ byte * BITS_PER_UNIT,
5533						GET_MODE_CLASS (mode));
5534
5535		  if ((GET_MODE_SIZE (GET_MODE (last_reg))
5536		       >= GET_MODE_SIZE (need_mode))
5537#ifdef CANNOT_CHANGE_MODE_CLASS
5538		      /* Verify that the register in "i" can be obtained
5539			 from LAST_REG.  */
5540		      && !REG_CANNOT_CHANGE_MODE_P (REGNO (last_reg),
5541						    GET_MODE (last_reg),
5542						    mode)
5543#endif
5544		      && reg_reloaded_contents[i] == regno
5545		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5546		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
5547		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5548			  /* Even if we can't use this register as a reload
5549			     register, we might use it for reload_override_in,
5550			     if copying it to the desired class is cheap
5551			     enough.  */
5552			  || ((REGISTER_MOVE_COST (mode, last_class, class)
5553			       < MEMORY_MOVE_COST (mode, class, 1))
5554#ifdef SECONDARY_INPUT_RELOAD_CLASS
5555			      && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5556								last_reg)
5557				  == NO_REGS)
5558#endif
5559#ifdef SECONDARY_MEMORY_NEEDED
5560			      && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5561							    mode)
5562#endif
5563			      ))
5564
5565		      && (rld[r].nregs == max_group_size
5566			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5567						  i))
5568		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5569					   rld[r].when_needed, rld[r].in,
5570					   const0_rtx, r, 1))
5571		    {
5572		      /* If a group is needed, verify that all the subsequent
5573			 registers still have their values intact.  */
5574		      int nr = hard_regno_nregs[i][rld[r].mode];
5575		      int k;
5576
5577		      for (k = 1; k < nr; k++)
5578			if (reg_reloaded_contents[i + k] != regno
5579			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5580			  break;
5581
5582		      if (k == nr)
5583			{
5584			  int i1;
5585			  int bad_for_class;
5586
5587			  last_reg = (GET_MODE (last_reg) == mode
5588				      ? last_reg : gen_rtx_REG (mode, i));
5589
5590			  bad_for_class = 0;
5591			  for (k = 0; k < nr; k++)
5592			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5593								  i+k);
5594
5595			  /* We found a register that contains the
5596			     value we need.  If this register is the
5597			     same as an `earlyclobber' operand of the
5598			     current insn, just mark it as a place to
5599			     reload from since we can't use it as the
5600			     reload register itself.  */
5601
5602			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
5603			    if (reg_overlap_mentioned_for_reload_p
5604				(reg_last_reload_reg[regno],
5605				 reload_earlyclobbers[i1]))
5606			      break;
5607
5608			  if (i1 != n_earlyclobbers
5609			      || ! (free_for_value_p (i, rld[r].mode,
5610						      rld[r].opnum,
5611						      rld[r].when_needed, rld[r].in,
5612						      rld[r].out, r, 1))
5613			      /* Don't use it if we'd clobber a pseudo reg.  */
5614			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5615				  && rld[r].out
5616				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5617			      /* Don't clobber the frame pointer.  */
5618			      || (i == HARD_FRAME_POINTER_REGNUM
5619				  && frame_pointer_needed
5620				  && rld[r].out)
5621			      /* Don't really use the inherited spill reg
5622				 if we need it wider than we've got it.  */
5623			      || (GET_MODE_SIZE (rld[r].mode)
5624				  > GET_MODE_SIZE (mode))
5625			      || bad_for_class
5626
5627			      /* If find_reloads chose reload_out as reload
5628				 register, stay with it - that leaves the
5629				 inherited register for subsequent reloads.  */
5630			      || (rld[r].out && rld[r].reg_rtx
5631				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5632			    {
5633			      if (! rld[r].optional)
5634				{
5635				  reload_override_in[r] = last_reg;
5636				  reload_inheritance_insn[r]
5637				    = reg_reloaded_insn[i];
5638				}
5639			    }
5640			  else
5641			    {
5642			      int k;
5643			      /* We can use this as a reload reg.  */
5644			      /* Mark the register as in use for this part of
5645				 the insn.  */
5646			      mark_reload_reg_in_use (i,
5647						      rld[r].opnum,
5648						      rld[r].when_needed,
5649						      rld[r].mode);
5650			      rld[r].reg_rtx = last_reg;
5651			      reload_inherited[r] = 1;
5652			      reload_inheritance_insn[r]
5653				= reg_reloaded_insn[i];
5654			      reload_spill_index[r] = i;
5655			      for (k = 0; k < nr; k++)
5656				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5657						  i + k);
5658			    }
5659			}
5660		    }
5661		}
5662	    }
5663
5664	  /* Here's another way to see if the value is already lying around.  */
5665	  if (inheritance
5666	      && rld[r].in != 0
5667	      && ! reload_inherited[r]
5668	      && rld[r].out == 0
5669	      && (CONSTANT_P (rld[r].in)
5670		  || GET_CODE (rld[r].in) == PLUS
5671		  || REG_P (rld[r].in)
5672		  || MEM_P (rld[r].in))
5673	      && (rld[r].nregs == max_group_size
5674		  || ! reg_classes_intersect_p (rld[r].class, group_class)))
5675	    search_equiv = rld[r].in;
5676	  /* If this is an output reload from a simple move insn, look
5677	     if an equivalence for the input is available.  */
5678	  else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5679	    {
5680	      rtx set = single_set (insn);
5681
5682	      if (set
5683		  && rtx_equal_p (rld[r].out, SET_DEST (set))
5684		  && CONSTANT_P (SET_SRC (set)))
5685		search_equiv = SET_SRC (set);
5686	    }
5687
5688	  if (search_equiv)
5689	    {
5690	      rtx equiv
5691		= find_equiv_reg (search_equiv, insn, rld[r].class,
5692				  -1, NULL, 0, rld[r].mode);
5693	      int regno = 0;
5694
5695	      if (equiv != 0)
5696		{
5697		  if (REG_P (equiv))
5698		    regno = REGNO (equiv);
5699		  else
5700		    {
5701		      /* This must be a SUBREG of a hard register.
5702			 Make a new REG since this might be used in an
5703			 address and not all machines support SUBREGs
5704			 there.  */
5705		      gcc_assert (GET_CODE (equiv) == SUBREG);
5706		      regno = subreg_regno (equiv);
5707		      equiv = gen_rtx_REG (rld[r].mode, regno);
5708		      /* If we choose EQUIV as the reload register, but the
5709			 loop below decides to cancel the inheritance, we'll
5710			 end up reloading EQUIV in rld[r].mode, not the mode
5711			 it had originally.  That isn't safe when EQUIV isn't
5712			 available as a spill register since its value might
5713			 still be live at this point.  */
5714		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
5715			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
5716			  equiv = 0;
5717		    }
5718		}
5719
5720	      /* If we found a spill reg, reject it unless it is free
5721		 and of the desired class.  */
5722	      if (equiv != 0)
5723		{
5724		  int regs_used = 0;
5725		  int bad_for_class = 0;
5726		  int max_regno = regno + rld[r].nregs;
5727
5728		  for (i = regno; i < max_regno; i++)
5729		    {
5730		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
5731						      i);
5732		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5733							   i);
5734		    }
5735
5736		  if ((regs_used
5737		       && ! free_for_value_p (regno, rld[r].mode,
5738					      rld[r].opnum, rld[r].when_needed,
5739					      rld[r].in, rld[r].out, r, 1))
5740		      || bad_for_class)
5741		    equiv = 0;
5742		}
5743
5744	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5745		equiv = 0;
5746
5747	      /* We found a register that contains the value we need.
5748		 If this register is the same as an `earlyclobber' operand
5749		 of the current insn, just mark it as a place to reload from
5750		 since we can't use it as the reload register itself.  */
5751
5752	      if (equiv != 0)
5753		for (i = 0; i < n_earlyclobbers; i++)
5754		  if (reg_overlap_mentioned_for_reload_p (equiv,
5755							  reload_earlyclobbers[i]))
5756		    {
5757		      if (! rld[r].optional)
5758			reload_override_in[r] = equiv;
5759		      equiv = 0;
5760		      break;
5761		    }
5762
5763	      /* If the equiv register we have found is explicitly clobbered
5764		 in the current insn, it depends on the reload type if we
5765		 can use it, use it for reload_override_in, or not at all.
5766		 In particular, we then can't use EQUIV for a
5767		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
5768
5769	      if (equiv != 0)
5770		{
5771		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
5772		    switch (rld[r].when_needed)
5773		      {
5774		      case RELOAD_FOR_OTHER_ADDRESS:
5775		      case RELOAD_FOR_INPADDR_ADDRESS:
5776		      case RELOAD_FOR_INPUT_ADDRESS:
5777		      case RELOAD_FOR_OPADDR_ADDR:
5778			break;
5779		      case RELOAD_OTHER:
5780		      case RELOAD_FOR_INPUT:
5781		      case RELOAD_FOR_OPERAND_ADDRESS:
5782			if (! rld[r].optional)
5783			  reload_override_in[r] = equiv;
5784			/* Fall through.  */
5785		      default:
5786			equiv = 0;
5787			break;
5788		      }
5789		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5790		    switch (rld[r].when_needed)
5791		      {
5792		      case RELOAD_FOR_OTHER_ADDRESS:
5793		      case RELOAD_FOR_INPADDR_ADDRESS:
5794		      case RELOAD_FOR_INPUT_ADDRESS:
5795		      case RELOAD_FOR_OPADDR_ADDR:
5796		      case RELOAD_FOR_OPERAND_ADDRESS:
5797		      case RELOAD_FOR_INPUT:
5798			break;
5799		      case RELOAD_OTHER:
5800			if (! rld[r].optional)
5801			  reload_override_in[r] = equiv;
5802			/* Fall through.  */
5803		      default:
5804			equiv = 0;
5805			break;
5806		      }
5807		}
5808
5809	      /* If we found an equivalent reg, say no code need be generated
5810		 to load it, and use it as our reload reg.  */
5811	      if (equiv != 0
5812		  && (regno != HARD_FRAME_POINTER_REGNUM
5813		      || !frame_pointer_needed))
5814		{
5815		  int nr = hard_regno_nregs[regno][rld[r].mode];
5816		  int k;
5817		  rld[r].reg_rtx = equiv;
5818		  reload_inherited[r] = 1;
5819
5820		  /* If reg_reloaded_valid is not set for this register,
5821		     there might be a stale spill_reg_store lying around.
5822		     We must clear it, since otherwise emit_reload_insns
5823		     might delete the store.  */
5824		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5825		    spill_reg_store[regno] = NULL_RTX;
5826		  /* If any of the hard registers in EQUIV are spill
5827		     registers, mark them as in use for this insn.  */
5828		  for (k = 0; k < nr; k++)
5829		    {
5830		      i = spill_reg_order[regno + k];
5831		      if (i >= 0)
5832			{
5833			  mark_reload_reg_in_use (regno, rld[r].opnum,
5834						  rld[r].when_needed,
5835						  rld[r].mode);
5836			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5837					    regno + k);
5838			}
5839		    }
5840		}
5841	    }
5842
5843	  /* If we found a register to use already, or if this is an optional
5844	     reload, we are done.  */
5845	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5846	    continue;
5847
5848#if 0
5849	  /* No longer needed for correct operation.  Might or might
5850	     not give better code on the average.  Want to experiment?  */
5851
5852	  /* See if there is a later reload that has a class different from our
5853	     class that intersects our class or that requires less register
5854	     than our reload.  If so, we must allocate a register to this
5855	     reload now, since that reload might inherit a previous reload
5856	     and take the only available register in our class.  Don't do this
5857	     for optional reloads since they will force all previous reloads
5858	     to be allocated.  Also don't do this for reloads that have been
5859	     turned off.  */
5860
5861	  for (i = j + 1; i < n_reloads; i++)
5862	    {
5863	      int s = reload_order[i];
5864
5865	      if ((rld[s].in == 0 && rld[s].out == 0
5866		   && ! rld[s].secondary_p)
5867		  || rld[s].optional)
5868		continue;
5869
5870	      if ((rld[s].class != rld[r].class
5871		   && reg_classes_intersect_p (rld[r].class,
5872					       rld[s].class))
5873		  || rld[s].nregs < rld[r].nregs)
5874		break;
5875	    }
5876
5877	  if (i == n_reloads)
5878	    continue;
5879
5880	  allocate_reload_reg (chain, r, j == n_reloads - 1);
5881#endif
5882	}
5883
5884      /* Now allocate reload registers for anything non-optional that
5885	 didn't get one yet.  */
5886      for (j = 0; j < n_reloads; j++)
5887	{
5888	  int r = reload_order[j];
5889
5890	  /* Ignore reloads that got marked inoperative.  */
5891	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5892	    continue;
5893
5894	  /* Skip reloads that already have a register allocated or are
5895	     optional.  */
5896	  if (rld[r].reg_rtx != 0 || rld[r].optional)
5897	    continue;
5898
5899	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5900	    break;
5901	}
5902
5903      /* If that loop got all the way, we have won.  */
5904      if (j == n_reloads)
5905	{
5906	  win = 1;
5907	  break;
5908	}
5909
5910      /* Loop around and try without any inheritance.  */
5911    }
5912
5913  if (! win)
5914    {
5915      /* First undo everything done by the failed attempt
5916	 to allocate with inheritance.  */
5917      choose_reload_regs_init (chain, save_reload_reg_rtx);
5918
5919      /* Some sanity tests to verify that the reloads found in the first
5920	 pass are identical to the ones we have now.  */
5921      gcc_assert (chain->n_reloads == n_reloads);
5922
5923      for (i = 0; i < n_reloads; i++)
5924	{
5925	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
5926	    continue;
5927	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
5928	  for (j = 0; j < n_spills; j++)
5929	    if (spill_regs[j] == chain->rld[i].regno)
5930	      if (! set_reload_reg (j, i))
5931		failed_reload (chain->insn, i);
5932	}
5933    }
5934
5935  /* If we thought we could inherit a reload, because it seemed that
5936     nothing else wanted the same reload register earlier in the insn,
5937     verify that assumption, now that all reloads have been assigned.
5938     Likewise for reloads where reload_override_in has been set.  */
5939
5940  /* If doing expensive optimizations, do one preliminary pass that doesn't
5941     cancel any inheritance, but removes reloads that have been needed only
5942     for reloads that we know can be inherited.  */
5943  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
5944    {
5945      for (j = 0; j < n_reloads; j++)
5946	{
5947	  int r = reload_order[j];
5948	  rtx check_reg;
5949	  if (reload_inherited[r] && rld[r].reg_rtx)
5950	    check_reg = rld[r].reg_rtx;
5951	  else if (reload_override_in[r]
5952		   && (REG_P (reload_override_in[r])
5953		       || GET_CODE (reload_override_in[r]) == SUBREG))
5954	    check_reg = reload_override_in[r];
5955	  else
5956	    continue;
5957	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
5958				  rld[r].opnum, rld[r].when_needed, rld[r].in,
5959				  (reload_inherited[r]
5960				   ? rld[r].out : const0_rtx),
5961				  r, 1))
5962	    {
5963	      if (pass)
5964		continue;
5965	      reload_inherited[r] = 0;
5966	      reload_override_in[r] = 0;
5967	    }
5968	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
5969	     reload_override_in, then we do not need its related
5970	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
5971	     likewise for other reload types.
5972	     We handle this by removing a reload when its only replacement
5973	     is mentioned in reload_in of the reload we are going to inherit.
5974	     A special case are auto_inc expressions; even if the input is
5975	     inherited, we still need the address for the output.  We can
5976	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
5977	     If we succeeded removing some reload and we are doing a preliminary
5978	     pass just to remove such reloads, make another pass, since the
5979	     removal of one reload might allow us to inherit another one.  */
5980	  else if (rld[r].in
5981		   && rld[r].out != rld[r].in
5982		   && remove_address_replacements (rld[r].in) && pass)
5983	    pass = 2;
5984	}
5985    }
5986
5987  /* Now that reload_override_in is known valid,
5988     actually override reload_in.  */
5989  for (j = 0; j < n_reloads; j++)
5990    if (reload_override_in[j])
5991      rld[j].in = reload_override_in[j];
5992
5993  /* If this reload won't be done because it has been canceled or is
5994     optional and not inherited, clear reload_reg_rtx so other
5995     routines (such as subst_reloads) don't get confused.  */
5996  for (j = 0; j < n_reloads; j++)
5997    if (rld[j].reg_rtx != 0
5998	&& ((rld[j].optional && ! reload_inherited[j])
5999	    || (rld[j].in == 0 && rld[j].out == 0
6000		&& ! rld[j].secondary_p)))
6001      {
6002	int regno = true_regnum (rld[j].reg_rtx);
6003
6004	if (spill_reg_order[regno] >= 0)
6005	  clear_reload_reg_in_use (regno, rld[j].opnum,
6006				   rld[j].when_needed, rld[j].mode);
6007	rld[j].reg_rtx = 0;
6008	reload_spill_index[j] = -1;
6009      }
6010
6011  /* Record which pseudos and which spill regs have output reloads.  */
6012  for (j = 0; j < n_reloads; j++)
6013    {
6014      int r = reload_order[j];
6015
6016      i = reload_spill_index[r];
6017
6018      /* I is nonneg if this reload uses a register.
6019	 If rld[r].reg_rtx is 0, this is an optional reload
6020	 that we opted to ignore.  */
6021      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6022	  && rld[r].reg_rtx != 0)
6023	{
6024	  int nregno = REGNO (rld[r].out_reg);
6025	  int nr = 1;
6026
6027	  if (nregno < FIRST_PSEUDO_REGISTER)
6028	    nr = hard_regno_nregs[nregno][rld[r].mode];
6029
6030	  while (--nr >= 0)
6031	    reg_has_output_reload[nregno + nr] = 1;
6032
6033	  if (i >= 0)
6034	    {
6035	      nr = hard_regno_nregs[i][rld[r].mode];
6036	      while (--nr >= 0)
6037		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6038	    }
6039
6040	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
6041		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
6042		      || rld[r].when_needed == RELOAD_FOR_INSN);
6043	}
6044    }
6045}
6046
6047/* Deallocate the reload register for reload R.  This is called from
6048   remove_address_replacements.  */
6049
6050void
6051deallocate_reload_reg (int r)
6052{
6053  int regno;
6054
6055  if (! rld[r].reg_rtx)
6056    return;
6057  regno = true_regnum (rld[r].reg_rtx);
6058  rld[r].reg_rtx = 0;
6059  if (spill_reg_order[regno] >= 0)
6060    clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6061			     rld[r].mode);
6062  reload_spill_index[r] = -1;
6063}
6064
6065/* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6066   reloads of the same item for fear that we might not have enough reload
6067   registers. However, normally they will get the same reload register
6068   and hence actually need not be loaded twice.
6069
6070   Here we check for the most common case of this phenomenon: when we have
6071   a number of reloads for the same object, each of which were allocated
6072   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6073   reload, and is not modified in the insn itself.  If we find such,
6074   merge all the reloads and set the resulting reload to RELOAD_OTHER.
6075   This will not increase the number of spill registers needed and will
6076   prevent redundant code.  */
6077
6078static void
6079merge_assigned_reloads (rtx insn)
6080{
6081  int i, j;
6082
6083  /* Scan all the reloads looking for ones that only load values and
6084     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6085     assigned and not modified by INSN.  */
6086
6087  for (i = 0; i < n_reloads; i++)
6088    {
6089      int conflicting_input = 0;
6090      int max_input_address_opnum = -1;
6091      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6092
6093      if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6094	  || rld[i].out != 0 || rld[i].reg_rtx == 0
6095	  || reg_set_p (rld[i].reg_rtx, insn))
6096	continue;
6097
6098      /* Look at all other reloads.  Ensure that the only use of this
6099	 reload_reg_rtx is in a reload that just loads the same value
6100	 as we do.  Note that any secondary reloads must be of the identical
6101	 class since the values, modes, and result registers are the
6102	 same, so we need not do anything with any secondary reloads.  */
6103
6104      for (j = 0; j < n_reloads; j++)
6105	{
6106	  if (i == j || rld[j].reg_rtx == 0
6107	      || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6108					    rld[i].reg_rtx))
6109	    continue;
6110
6111	  if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6112	      && rld[j].opnum > max_input_address_opnum)
6113	    max_input_address_opnum = rld[j].opnum;
6114
6115	  /* If the reload regs aren't exactly the same (e.g, different modes)
6116	     or if the values are different, we can't merge this reload.
6117	     But if it is an input reload, we might still merge
6118	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */
6119
6120	  if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6121	      || rld[j].out != 0 || rld[j].in == 0
6122	      || ! rtx_equal_p (rld[i].in, rld[j].in))
6123	    {
6124	      if (rld[j].when_needed != RELOAD_FOR_INPUT
6125		  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6126		       || rld[i].opnum > rld[j].opnum)
6127		      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6128		break;
6129	      conflicting_input = 1;
6130	      if (min_conflicting_input_opnum > rld[j].opnum)
6131		min_conflicting_input_opnum = rld[j].opnum;
6132	    }
6133	}
6134
6135      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
6136	 we, in fact, found any matching reloads.  */
6137
6138      if (j == n_reloads
6139	  && max_input_address_opnum <= min_conflicting_input_opnum)
6140	{
6141	  gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
6142
6143	  for (j = 0; j < n_reloads; j++)
6144	    if (i != j && rld[j].reg_rtx != 0
6145		&& rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6146		&& (! conflicting_input
6147		    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6148		    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6149	      {
6150		rld[i].when_needed = RELOAD_OTHER;
6151		rld[j].in = 0;
6152		reload_spill_index[j] = -1;
6153		transfer_replacements (i, j);
6154	      }
6155
6156	  /* If this is now RELOAD_OTHER, look for any reloads that load
6157	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6158	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
6159	     this test is equivalent to looking for reloads for this operand
6160	     number.  */
6161	  /* We must take special care with RELOAD_FOR_OUTPUT_ADDRESS; it may
6162	     share registers with a RELOAD_FOR_INPUT, so we can not change it
6163	     to RELOAD_FOR_OTHER_ADDRESS.  We should never need to, since we
6164	     do not modify RELOAD_FOR_OUTPUT.  */
6165
6166	  if (rld[i].when_needed == RELOAD_OTHER)
6167	    for (j = 0; j < n_reloads; j++)
6168	      if (rld[j].in != 0
6169		  && rld[j].when_needed != RELOAD_OTHER
6170		  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6171		  && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
6172		  && (! conflicting_input
6173		      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6174		      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6175		  && reg_overlap_mentioned_for_reload_p (rld[j].in,
6176							 rld[i].in))
6177		{
6178		  int k;
6179
6180		  rld[j].when_needed
6181		    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6182			|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6183		       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6184
6185		  /* Check to see if we accidentally converted two
6186		     reloads that use the same reload register with
6187		     different inputs to the same type.  If so, the
6188		     resulting code won't work.  */
6189		  if (rld[j].reg_rtx)
6190		    for (k = 0; k < j; k++)
6191		      gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
6192				  || rld[k].when_needed != rld[j].when_needed
6193				  || !rtx_equal_p (rld[k].reg_rtx,
6194						   rld[j].reg_rtx)
6195				  || rtx_equal_p (rld[k].in,
6196						  rld[j].in));
6197		}
6198	}
6199    }
6200}
6201
6202/* These arrays are filled by emit_reload_insns and its subroutines.  */
6203static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6204static rtx other_input_address_reload_insns = 0;
6205static rtx other_input_reload_insns = 0;
6206static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6207static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6208static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6209static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6210static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6211static rtx operand_reload_insns = 0;
6212static rtx other_operand_reload_insns = 0;
6213static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6214
6215/* Values to be put in spill_reg_store are put here first.  */
6216static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6217static HARD_REG_SET reg_reloaded_died;
6218
6219/* Generate insns to perform reload RL, which is for the insn in CHAIN and
6220   has the number J.  OLD contains the value to be used as input.  */
6221
6222static void
6223emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6224			 rtx old, int j)
6225{
6226  rtx insn = chain->insn;
6227  rtx reloadreg = rl->reg_rtx;
6228  rtx oldequiv_reg = 0;
6229  rtx oldequiv = 0;
6230  int special = 0;
6231  enum machine_mode mode;
6232  rtx *where;
6233
6234  /* Determine the mode to reload in.
6235     This is very tricky because we have three to choose from.
6236     There is the mode the insn operand wants (rl->inmode).
6237     There is the mode of the reload register RELOADREG.
6238     There is the intrinsic mode of the operand, which we could find
6239     by stripping some SUBREGs.
6240     It turns out that RELOADREG's mode is irrelevant:
6241     we can change that arbitrarily.
6242
6243     Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6244     then the reload reg may not support QImode moves, so use SImode.
6245     If foo is in memory due to spilling a pseudo reg, this is safe,
6246     because the QImode value is in the least significant part of a
6247     slot big enough for a SImode.  If foo is some other sort of
6248     memory reference, then it is impossible to reload this case,
6249     so previous passes had better make sure this never happens.
6250
6251     Then consider a one-word union which has SImode and one of its
6252     members is a float, being fetched as (SUBREG:SF union:SI).
6253     We must fetch that as SFmode because we could be loading into
6254     a float-only register.  In this case OLD's mode is correct.
6255
6256     Consider an immediate integer: it has VOIDmode.  Here we need
6257     to get a mode from something else.
6258
6259     In some cases, there is a fourth mode, the operand's
6260     containing mode.  If the insn specifies a containing mode for
6261     this operand, it overrides all others.
6262
6263     I am not sure whether the algorithm here is always right,
6264     but it does the right things in those cases.  */
6265
6266  mode = GET_MODE (old);
6267  if (mode == VOIDmode)
6268    mode = rl->inmode;
6269
6270#ifdef SECONDARY_INPUT_RELOAD_CLASS
6271  /* If we need a secondary register for this operation, see if
6272     the value is already in a register in that class.  Don't
6273     do this if the secondary register will be used as a scratch
6274     register.  */
6275
6276  if (rl->secondary_in_reload >= 0
6277      && rl->secondary_in_icode == CODE_FOR_nothing
6278      && optimize)
6279    oldequiv
6280      = find_equiv_reg (old, insn,
6281			rld[rl->secondary_in_reload].class,
6282			-1, NULL, 0, mode);
6283#endif
6284
6285  /* If reloading from memory, see if there is a register
6286     that already holds the same value.  If so, reload from there.
6287     We can pass 0 as the reload_reg_p argument because
6288     any other reload has either already been emitted,
6289     in which case find_equiv_reg will see the reload-insn,
6290     or has yet to be emitted, in which case it doesn't matter
6291     because we will use this equiv reg right away.  */
6292
6293  if (oldequiv == 0 && optimize
6294      && (MEM_P (old)
6295	  || (REG_P (old)
6296	      && REGNO (old) >= FIRST_PSEUDO_REGISTER
6297	      && reg_renumber[REGNO (old)] < 0)))
6298    oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
6299
6300  if (oldequiv)
6301    {
6302      unsigned int regno = true_regnum (oldequiv);
6303
6304      /* Don't use OLDEQUIV if any other reload changes it at an
6305	 earlier stage of this insn or at this stage.  */
6306      if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
6307			      rl->in, const0_rtx, j, 0))
6308	oldequiv = 0;
6309
6310      /* If it is no cheaper to copy from OLDEQUIV into the
6311	 reload register than it would be to move from memory,
6312	 don't use it. Likewise, if we need a secondary register
6313	 or memory.  */
6314
6315      if (oldequiv != 0
6316	  && (((enum reg_class) REGNO_REG_CLASS (regno) != rl->class
6317	       && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6318				       rl->class)
6319		   >= MEMORY_MOVE_COST (mode, rl->class, 1)))
6320#ifdef SECONDARY_INPUT_RELOAD_CLASS
6321	      || (SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6322						mode, oldequiv)
6323		  != NO_REGS)
6324#endif
6325#ifdef SECONDARY_MEMORY_NEEDED
6326	      || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6327					  rl->class,
6328					  mode)
6329#endif
6330	      ))
6331	oldequiv = 0;
6332    }
6333
6334  /* delete_output_reload is only invoked properly if old contains
6335     the original pseudo register.  Since this is replaced with a
6336     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6337     find the pseudo in RELOAD_IN_REG.  */
6338  if (oldequiv == 0
6339      && reload_override_in[j]
6340      && REG_P (rl->in_reg))
6341    {
6342      oldequiv = old;
6343      old = rl->in_reg;
6344    }
6345  if (oldequiv == 0)
6346    oldequiv = old;
6347  else if (REG_P (oldequiv))
6348    oldequiv_reg = oldequiv;
6349  else if (GET_CODE (oldequiv) == SUBREG)
6350    oldequiv_reg = SUBREG_REG (oldequiv);
6351
6352  /* If we are reloading from a register that was recently stored in
6353     with an output-reload, see if we can prove there was
6354     actually no need to store the old value in it.  */
6355
6356  if (optimize && REG_P (oldequiv)
6357      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6358      && spill_reg_store[REGNO (oldequiv)]
6359      && REG_P (old)
6360      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6361	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6362			  rl->out_reg)))
6363    delete_output_reload (insn, j, REGNO (oldequiv));
6364
6365  /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6366     then load RELOADREG from OLDEQUIV.  Note that we cannot use
6367     gen_lowpart_common since it can do the wrong thing when
6368     RELOADREG has a multi-word mode.  Note that RELOADREG
6369     must always be a REG here.  */
6370
6371  if (GET_MODE (reloadreg) != mode)
6372    reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6373  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6374    oldequiv = SUBREG_REG (oldequiv);
6375  if (GET_MODE (oldequiv) != VOIDmode
6376      && mode != GET_MODE (oldequiv))
6377    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6378
6379  /* Switch to the right place to emit the reload insns.  */
6380  switch (rl->when_needed)
6381    {
6382    case RELOAD_OTHER:
6383      where = &other_input_reload_insns;
6384      break;
6385    case RELOAD_FOR_INPUT:
6386      where = &input_reload_insns[rl->opnum];
6387      break;
6388    case RELOAD_FOR_INPUT_ADDRESS:
6389      where = &input_address_reload_insns[rl->opnum];
6390      break;
6391    case RELOAD_FOR_INPADDR_ADDRESS:
6392      where = &inpaddr_address_reload_insns[rl->opnum];
6393      break;
6394    case RELOAD_FOR_OUTPUT_ADDRESS:
6395      where = &output_address_reload_insns[rl->opnum];
6396      break;
6397    case RELOAD_FOR_OUTADDR_ADDRESS:
6398      where = &outaddr_address_reload_insns[rl->opnum];
6399      break;
6400    case RELOAD_FOR_OPERAND_ADDRESS:
6401      where = &operand_reload_insns;
6402      break;
6403    case RELOAD_FOR_OPADDR_ADDR:
6404      where = &other_operand_reload_insns;
6405      break;
6406    case RELOAD_FOR_OTHER_ADDRESS:
6407      where = &other_input_address_reload_insns;
6408      break;
6409    default:
6410      gcc_unreachable ();
6411    }
6412
6413  push_to_sequence (*where);
6414
6415  /* Auto-increment addresses must be reloaded in a special way.  */
6416  if (rl->out && ! rl->out_reg)
6417    {
6418      /* We are not going to bother supporting the case where a
6419	 incremented register can't be copied directly from
6420	 OLDEQUIV since this seems highly unlikely.  */
6421      gcc_assert (rl->secondary_in_reload < 0);
6422
6423      if (reload_inherited[j])
6424	oldequiv = reloadreg;
6425
6426      old = XEXP (rl->in_reg, 0);
6427
6428      if (optimize && REG_P (oldequiv)
6429	  && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6430	  && spill_reg_store[REGNO (oldequiv)]
6431	  && REG_P (old)
6432	  && (dead_or_set_p (insn,
6433			     spill_reg_stored_to[REGNO (oldequiv)])
6434	      || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6435			      old)))
6436	delete_output_reload (insn, j, REGNO (oldequiv));
6437
6438      /* Prevent normal processing of this reload.  */
6439      special = 1;
6440      /* Output a special code sequence for this case.  */
6441      new_spill_reg_store[REGNO (reloadreg)]
6442	= inc_for_reload (reloadreg, oldequiv, rl->out,
6443			  rl->inc);
6444    }
6445
6446  /* If we are reloading a pseudo-register that was set by the previous
6447     insn, see if we can get rid of that pseudo-register entirely
6448     by redirecting the previous insn into our reload register.  */
6449
6450  else if (optimize && REG_P (old)
6451	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
6452	   && dead_or_set_p (insn, old)
6453	   /* This is unsafe if some other reload
6454	      uses the same reg first.  */
6455	   && ! conflicts_with_override (reloadreg)
6456	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6457				rl->when_needed, old, rl->out, j, 0))
6458    {
6459      rtx temp = PREV_INSN (insn);
6460      while (temp && NOTE_P (temp))
6461	temp = PREV_INSN (temp);
6462      if (temp
6463	  && NONJUMP_INSN_P (temp)
6464	  && GET_CODE (PATTERN (temp)) == SET
6465	  && SET_DEST (PATTERN (temp)) == old
6466	  /* Make sure we can access insn_operand_constraint.  */
6467	  && asm_noperands (PATTERN (temp)) < 0
6468	  /* This is unsafe if operand occurs more than once in current
6469	     insn.  Perhaps some occurrences aren't reloaded.  */
6470	  && count_occurrences (PATTERN (insn), old, 0) == 1)
6471	{
6472	  rtx old = SET_DEST (PATTERN (temp));
6473	  /* Store into the reload register instead of the pseudo.  */
6474	  SET_DEST (PATTERN (temp)) = reloadreg;
6475
6476	  /* Verify that resulting insn is valid.  */
6477	  extract_insn (temp);
6478	  if (constrain_operands (1))
6479	    {
6480	      /* If the previous insn is an output reload, the source is
6481		 a reload register, and its spill_reg_store entry will
6482		 contain the previous destination.  This is now
6483		 invalid.  */
6484	      if (REG_P (SET_SRC (PATTERN (temp)))
6485		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6486		{
6487		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6488		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6489		}
6490
6491	      /* If these are the only uses of the pseudo reg,
6492		 pretend for GDB it lives in the reload reg we used.  */
6493	      if (REG_N_DEATHS (REGNO (old)) == 1
6494		  && REG_N_SETS (REGNO (old)) == 1)
6495		{
6496		  reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6497		  alter_reg (REGNO (old), -1);
6498		}
6499	      special = 1;
6500	    }
6501	  else
6502	    {
6503	      SET_DEST (PATTERN (temp)) = old;
6504	    }
6505	}
6506    }
6507
6508  /* We can't do that, so output an insn to load RELOADREG.  */
6509
6510#ifdef SECONDARY_INPUT_RELOAD_CLASS
6511  /* If we have a secondary reload, pick up the secondary register
6512     and icode, if any.  If OLDEQUIV and OLD are different or
6513     if this is an in-out reload, recompute whether or not we
6514     still need a secondary register and what the icode should
6515     be.  If we still need a secondary register and the class or
6516     icode is different, go back to reloading from OLD if using
6517     OLDEQUIV means that we got the wrong type of register.  We
6518     cannot have different class or icode due to an in-out reload
6519     because we don't make such reloads when both the input and
6520     output need secondary reload registers.  */
6521
6522  if (! special && rl->secondary_in_reload >= 0)
6523    {
6524      rtx second_reload_reg = 0;
6525      int secondary_reload = rl->secondary_in_reload;
6526      rtx real_oldequiv = oldequiv;
6527      rtx real_old = old;
6528      rtx tmp;
6529      enum insn_code icode;
6530
6531      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6532	 and similarly for OLD.
6533	 See comments in get_secondary_reload in reload.c.  */
6534      /* If it is a pseudo that cannot be replaced with its
6535	 equivalent MEM, we must fall back to reload_in, which
6536	 will have all the necessary substitutions registered.
6537	 Likewise for a pseudo that can't be replaced with its
6538	 equivalent constant.
6539
6540	 Take extra care for subregs of such pseudos.  Note that
6541	 we cannot use reg_equiv_mem in this case because it is
6542	 not in the right mode.  */
6543
6544      tmp = oldequiv;
6545      if (GET_CODE (tmp) == SUBREG)
6546	tmp = SUBREG_REG (tmp);
6547      if (REG_P (tmp)
6548	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6549	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6550	      || reg_equiv_constant[REGNO (tmp)] != 0))
6551	{
6552	  if (! reg_equiv_mem[REGNO (tmp)]
6553	      || num_not_at_initial_offset
6554	      || GET_CODE (oldequiv) == SUBREG)
6555	    real_oldequiv = rl->in;
6556	  else
6557	    real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6558	}
6559
6560      tmp = old;
6561      if (GET_CODE (tmp) == SUBREG)
6562	tmp = SUBREG_REG (tmp);
6563      if (REG_P (tmp)
6564	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6565	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6566	      || reg_equiv_constant[REGNO (tmp)] != 0))
6567	{
6568	  if (! reg_equiv_mem[REGNO (tmp)]
6569	      || num_not_at_initial_offset
6570	      || GET_CODE (old) == SUBREG)
6571	    real_old = rl->in;
6572	  else
6573	    real_old = reg_equiv_mem[REGNO (tmp)];
6574	}
6575
6576      second_reload_reg = rld[secondary_reload].reg_rtx;
6577      icode = rl->secondary_in_icode;
6578
6579      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6580	  || (rl->in != 0 && rl->out != 0))
6581	{
6582	  enum reg_class new_class
6583	    = SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6584					    mode, real_oldequiv);
6585
6586	  if (new_class == NO_REGS)
6587	    second_reload_reg = 0;
6588	  else
6589	    {
6590	      enum insn_code new_icode;
6591	      enum machine_mode new_mode;
6592
6593	      if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6594				       REGNO (second_reload_reg)))
6595		oldequiv = old, real_oldequiv = real_old;
6596	      else
6597		{
6598		  new_icode = reload_in_optab[(int) mode];
6599		  if (new_icode != CODE_FOR_nothing
6600		      && ((insn_data[(int) new_icode].operand[0].predicate
6601			   && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6602				 (reloadreg, mode)))
6603			  || (insn_data[(int) new_icode].operand[1].predicate
6604			      && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6605				    (real_oldequiv, mode)))))
6606		    new_icode = CODE_FOR_nothing;
6607
6608		  if (new_icode == CODE_FOR_nothing)
6609		    new_mode = mode;
6610		  else
6611		    new_mode = insn_data[(int) new_icode].operand[2].mode;
6612
6613		  if (GET_MODE (second_reload_reg) != new_mode)
6614		    {
6615		      if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6616					       new_mode))
6617			oldequiv = old, real_oldequiv = real_old;
6618		      else
6619			second_reload_reg
6620			  = reload_adjust_reg_for_mode (second_reload_reg,
6621							new_mode);
6622		    }
6623		}
6624	    }
6625	}
6626
6627      /* If we still need a secondary reload register, check
6628	 to see if it is being used as a scratch or intermediate
6629	 register and generate code appropriately.  If we need
6630	 a scratch register, use REAL_OLDEQUIV since the form of
6631	 the insn may depend on the actual address if it is
6632	 a MEM.  */
6633
6634      if (second_reload_reg)
6635	{
6636	  if (icode != CODE_FOR_nothing)
6637	    {
6638	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6639					  second_reload_reg));
6640	      special = 1;
6641	    }
6642	  else
6643	    {
6644	      /* See if we need a scratch register to load the
6645		 intermediate register (a tertiary reload).  */
6646	      enum insn_code tertiary_icode
6647		= rld[secondary_reload].secondary_in_icode;
6648
6649	      if (tertiary_icode != CODE_FOR_nothing)
6650		{
6651		  rtx third_reload_reg
6652		    = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
6653
6654		  emit_insn ((GEN_FCN (tertiary_icode)
6655			      (second_reload_reg, real_oldequiv,
6656			       third_reload_reg)));
6657		}
6658	      else
6659		gen_reload (second_reload_reg, real_oldequiv,
6660			    rl->opnum,
6661			    rl->when_needed);
6662
6663	      oldequiv = second_reload_reg;
6664	    }
6665	}
6666    }
6667#endif
6668
6669  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6670    {
6671      rtx real_oldequiv = oldequiv;
6672
6673      if ((REG_P (oldequiv)
6674	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6675	   && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6676	       || reg_equiv_constant[REGNO (oldequiv)] != 0))
6677	  || (GET_CODE (oldequiv) == SUBREG
6678	      && REG_P (SUBREG_REG (oldequiv))
6679	      && (REGNO (SUBREG_REG (oldequiv))
6680		  >= FIRST_PSEUDO_REGISTER)
6681	      && ((reg_equiv_memory_loc
6682		   [REGNO (SUBREG_REG (oldequiv))] != 0)
6683		  || (reg_equiv_constant
6684		      [REGNO (SUBREG_REG (oldequiv))] != 0)))
6685	  || (CONSTANT_P (oldequiv)
6686	      && (PREFERRED_RELOAD_CLASS (oldequiv,
6687					  REGNO_REG_CLASS (REGNO (reloadreg)))
6688		  == NO_REGS)))
6689	real_oldequiv = rl->in;
6690      gen_reload (reloadreg, real_oldequiv, rl->opnum,
6691		  rl->when_needed);
6692    }
6693
6694  if (flag_non_call_exceptions)
6695    copy_eh_notes (insn, get_insns ());
6696
6697  /* End this sequence.  */
6698  *where = get_insns ();
6699  end_sequence ();
6700
6701  /* Update reload_override_in so that delete_address_reloads_1
6702     can see the actual register usage.  */
6703  if (oldequiv_reg)
6704    reload_override_in[j] = oldequiv;
6705}
6706
6707/* Generate insns to for the output reload RL, which is for the insn described
6708   by CHAIN and has the number J.  */
6709static void
6710emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
6711			  int j)
6712{
6713  rtx reloadreg = rl->reg_rtx;
6714  rtx insn = chain->insn;
6715  int special = 0;
6716  rtx old = rl->out;
6717  enum machine_mode mode = GET_MODE (old);
6718  rtx p;
6719
6720  if (rl->when_needed == RELOAD_OTHER)
6721    start_sequence ();
6722  else
6723    push_to_sequence (output_reload_insns[rl->opnum]);
6724
6725  /* Determine the mode to reload in.
6726     See comments above (for input reloading).  */
6727
6728  if (mode == VOIDmode)
6729    {
6730      /* VOIDmode should never happen for an output.  */
6731      if (asm_noperands (PATTERN (insn)) < 0)
6732	/* It's the compiler's fault.  */
6733	fatal_insn ("VOIDmode on an output", insn);
6734      error_for_asm (insn, "output operand is constant in %<asm%>");
6735      /* Prevent crash--use something we know is valid.  */
6736      mode = word_mode;
6737      old = gen_rtx_REG (mode, REGNO (reloadreg));
6738    }
6739
6740  if (GET_MODE (reloadreg) != mode)
6741    reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6742
6743#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6744
6745  /* If we need two reload regs, set RELOADREG to the intermediate
6746     one, since it will be stored into OLD.  We might need a secondary
6747     register only for an input reload, so check again here.  */
6748
6749  if (rl->secondary_out_reload >= 0)
6750    {
6751      rtx real_old = old;
6752
6753      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
6754	  && reg_equiv_mem[REGNO (old)] != 0)
6755	real_old = reg_equiv_mem[REGNO (old)];
6756
6757      if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl->class,
6758					  mode, real_old)
6759	   != NO_REGS))
6760	{
6761	  rtx second_reloadreg = reloadreg;
6762	  reloadreg = rld[rl->secondary_out_reload].reg_rtx;
6763
6764	  /* See if RELOADREG is to be used as a scratch register
6765	     or as an intermediate register.  */
6766	  if (rl->secondary_out_icode != CODE_FOR_nothing)
6767	    {
6768	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
6769			  (real_old, second_reloadreg, reloadreg)));
6770	      special = 1;
6771	    }
6772	  else
6773	    {
6774	      /* See if we need both a scratch and intermediate reload
6775		 register.  */
6776
6777	      int secondary_reload = rl->secondary_out_reload;
6778	      enum insn_code tertiary_icode
6779		= rld[secondary_reload].secondary_out_icode;
6780
6781	      if (GET_MODE (reloadreg) != mode)
6782		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6783
6784	      if (tertiary_icode != CODE_FOR_nothing)
6785		{
6786		  rtx third_reloadreg
6787		    = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
6788		  rtx tem;
6789
6790		  /* Copy primary reload reg to secondary reload reg.
6791		     (Note that these have been swapped above, then
6792		     secondary reload reg to OLD using our insn.)  */
6793
6794		  /* If REAL_OLD is a paradoxical SUBREG, remove it
6795		     and try to put the opposite SUBREG on
6796		     RELOADREG.  */
6797		  if (GET_CODE (real_old) == SUBREG
6798		      && (GET_MODE_SIZE (GET_MODE (real_old))
6799			  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6800		      && 0 != (tem = gen_lowpart_common
6801			       (GET_MODE (SUBREG_REG (real_old)),
6802				reloadreg)))
6803		    real_old = SUBREG_REG (real_old), reloadreg = tem;
6804
6805		  gen_reload (reloadreg, second_reloadreg,
6806			      rl->opnum, rl->when_needed);
6807		  emit_insn ((GEN_FCN (tertiary_icode)
6808			      (real_old, reloadreg, third_reloadreg)));
6809		  special = 1;
6810		}
6811
6812	      else
6813		/* Copy between the reload regs here and then to
6814		   OUT later.  */
6815
6816		gen_reload (reloadreg, second_reloadreg,
6817			    rl->opnum, rl->when_needed);
6818	    }
6819	}
6820    }
6821#endif
6822
6823  /* Output the last reload insn.  */
6824  if (! special)
6825    {
6826      rtx set;
6827
6828      /* Don't output the last reload if OLD is not the dest of
6829	 INSN and is in the src and is clobbered by INSN.  */
6830      if (! flag_expensive_optimizations
6831	  || !REG_P (old)
6832	  || !(set = single_set (insn))
6833	  || rtx_equal_p (old, SET_DEST (set))
6834	  || !reg_mentioned_p (old, SET_SRC (set))
6835	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
6836	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
6837	gen_reload (old, reloadreg, rl->opnum,
6838		    rl->when_needed);
6839    }
6840
6841  /* Look at all insns we emitted, just to be safe.  */
6842  for (p = get_insns (); p; p = NEXT_INSN (p))
6843    if (INSN_P (p))
6844      {
6845	rtx pat = PATTERN (p);
6846
6847	/* If this output reload doesn't come from a spill reg,
6848	   clear any memory of reloaded copies of the pseudo reg.
6849	   If this output reload comes from a spill reg,
6850	   reg_has_output_reload will make this do nothing.  */
6851	note_stores (pat, forget_old_reloads_1, NULL);
6852
6853	if (reg_mentioned_p (rl->reg_rtx, pat))
6854	  {
6855	    rtx set = single_set (insn);
6856	    if (reload_spill_index[j] < 0
6857		&& set
6858		&& SET_SRC (set) == rl->reg_rtx)
6859	      {
6860		int src = REGNO (SET_SRC (set));
6861
6862		reload_spill_index[j] = src;
6863		SET_HARD_REG_BIT (reg_is_output_reload, src);
6864		if (find_regno_note (insn, REG_DEAD, src))
6865		  SET_HARD_REG_BIT (reg_reloaded_died, src);
6866	      }
6867	    if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6868	      {
6869		int s = rl->secondary_out_reload;
6870		set = single_set (p);
6871		/* If this reload copies only to the secondary reload
6872		   register, the secondary reload does the actual
6873		   store.  */
6874		if (s >= 0 && set == NULL_RTX)
6875		  /* We can't tell what function the secondary reload
6876		     has and where the actual store to the pseudo is
6877		     made; leave new_spill_reg_store alone.  */
6878		  ;
6879		else if (s >= 0
6880			 && SET_SRC (set) == rl->reg_rtx
6881			 && SET_DEST (set) == rld[s].reg_rtx)
6882		  {
6883		    /* Usually the next instruction will be the
6884		       secondary reload insn;  if we can confirm
6885		       that it is, setting new_spill_reg_store to
6886		       that insn will allow an extra optimization.  */
6887		    rtx s_reg = rld[s].reg_rtx;
6888		    rtx next = NEXT_INSN (p);
6889		    rld[s].out = rl->out;
6890		    rld[s].out_reg = rl->out_reg;
6891		    set = single_set (next);
6892		    if (set && SET_SRC (set) == s_reg
6893			&& ! new_spill_reg_store[REGNO (s_reg)])
6894		      {
6895			SET_HARD_REG_BIT (reg_is_output_reload,
6896					  REGNO (s_reg));
6897			new_spill_reg_store[REGNO (s_reg)] = next;
6898		      }
6899		  }
6900		else
6901		  new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
6902	      }
6903	  }
6904      }
6905
6906  if (rl->when_needed == RELOAD_OTHER)
6907    {
6908      emit_insn (other_output_reload_insns[rl->opnum]);
6909      other_output_reload_insns[rl->opnum] = get_insns ();
6910    }
6911  else
6912    output_reload_insns[rl->opnum] = get_insns ();
6913
6914  if (flag_non_call_exceptions)
6915    copy_eh_notes (insn, get_insns ());
6916
6917  end_sequence ();
6918}
6919
6920/* Do input reloading for reload RL, which is for the insn described by CHAIN
6921   and has the number J.  */
6922static void
6923do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
6924{
6925  rtx insn = chain->insn;
6926  rtx old = (rl->in && MEM_P (rl->in)
6927	     ? rl->in_reg : rl->in);
6928
6929  if (old != 0
6930      /* AUTO_INC reloads need to be handled even if inherited.  We got an
6931	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
6932      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
6933      && ! rtx_equal_p (rl->reg_rtx, old)
6934      && rl->reg_rtx != 0)
6935    emit_input_reload_insns (chain, rld + j, old, j);
6936
6937  /* When inheriting a wider reload, we have a MEM in rl->in,
6938     e.g. inheriting a SImode output reload for
6939     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
6940  if (optimize && reload_inherited[j] && rl->in
6941      && MEM_P (rl->in)
6942      && MEM_P (rl->in_reg)
6943      && reload_spill_index[j] >= 0
6944      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6945    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6946
6947  /* If we are reloading a register that was recently stored in with an
6948     output-reload, see if we can prove there was
6949     actually no need to store the old value in it.  */
6950
6951  if (optimize
6952      /* Only attempt this for input reloads; for RELOAD_OTHER we miss
6953	 that there may be multiple uses of the previous output reload.
6954	 Restricting to RELOAD_FOR_INPUT is mostly paranoia.  */
6955      && rl->when_needed == RELOAD_FOR_INPUT
6956      && (reload_inherited[j] || reload_override_in[j])
6957      && rl->reg_rtx
6958      && REG_P (rl->reg_rtx)
6959      && spill_reg_store[REGNO (rl->reg_rtx)] != 0
6960#if 0
6961      /* There doesn't seem to be any reason to restrict this to pseudos
6962	 and doing so loses in the case where we are copying from a
6963	 register of the wrong class.  */
6964      && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
6965	  >= FIRST_PSEUDO_REGISTER)
6966#endif
6967      /* The insn might have already some references to stackslots
6968	 replaced by MEMs, while reload_out_reg still names the
6969	 original pseudo.  */
6970      && (dead_or_set_p (insn,
6971			 spill_reg_stored_to[REGNO (rl->reg_rtx)])
6972	  || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
6973			  rl->out_reg)))
6974    delete_output_reload (insn, j, REGNO (rl->reg_rtx));
6975}
6976
6977/* Do output reloading for reload RL, which is for the insn described by
6978   CHAIN and has the number J.
6979   ??? At some point we need to support handling output reloads of
6980   JUMP_INSNs or insns that set cc0.  */
6981static void
6982do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
6983{
6984  rtx note, old;
6985  rtx insn = chain->insn;
6986  /* If this is an output reload that stores something that is
6987     not loaded in this same reload, see if we can eliminate a previous
6988     store.  */
6989  rtx pseudo = rl->out_reg;
6990
6991  if (pseudo
6992      && optimize
6993      && REG_P (pseudo)
6994      && ! rtx_equal_p (rl->in_reg, pseudo)
6995      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6996      && reg_last_reload_reg[REGNO (pseudo)])
6997    {
6998      int pseudo_no = REGNO (pseudo);
6999      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7000
7001      /* We don't need to test full validity of last_regno for
7002	 inherit here; we only want to know if the store actually
7003	 matches the pseudo.  */
7004      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7005	  && reg_reloaded_contents[last_regno] == pseudo_no
7006	  && spill_reg_store[last_regno]
7007	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7008	delete_output_reload (insn, j, last_regno);
7009    }
7010
7011  old = rl->out_reg;
7012  if (old == 0
7013      || rl->reg_rtx == old
7014      || rl->reg_rtx == 0)
7015    return;
7016
7017  /* An output operand that dies right away does need a reload,
7018     but need not be copied from it.  Show the new location in the
7019     REG_UNUSED note.  */
7020  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7021      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7022    {
7023      XEXP (note, 0) = rl->reg_rtx;
7024      return;
7025    }
7026  /* Likewise for a SUBREG of an operand that dies.  */
7027  else if (GET_CODE (old) == SUBREG
7028	   && REG_P (SUBREG_REG (old))
7029	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
7030					  SUBREG_REG (old))))
7031    {
7032      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7033					   rl->reg_rtx);
7034      return;
7035    }
7036  else if (GET_CODE (old) == SCRATCH)
7037    /* If we aren't optimizing, there won't be a REG_UNUSED note,
7038       but we don't want to make an output reload.  */
7039    return;
7040
7041  /* If is a JUMP_INSN, we can't support output reloads yet.  */
7042  gcc_assert (!JUMP_P (insn));
7043
7044  emit_output_reload_insns (chain, rld + j, j);
7045}
7046
7047/* Reload number R reloads from or to a group of hard registers starting at
7048   register REGNO.  Return true if it can be treated for inheritance purposes
7049   like a group of reloads, each one reloading a single hard register.
7050   The caller has already checked that the spill register and REGNO use
7051   the same number of registers to store the reload value.  */
7052
7053static bool
7054inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
7055{
7056#ifdef CANNOT_CHANGE_MODE_CLASS
7057  return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
7058				     GET_MODE (rld[r].reg_rtx),
7059				     reg_raw_mode[reload_spill_index[r]])
7060	  && !REG_CANNOT_CHANGE_MODE_P (regno,
7061					GET_MODE (rld[r].reg_rtx),
7062					reg_raw_mode[regno]));
7063#else
7064  return true;
7065#endif
7066}
7067
7068/* Output insns to reload values in and out of the chosen reload regs.  */
7069
7070static void
7071emit_reload_insns (struct insn_chain *chain)
7072{
7073  rtx insn = chain->insn;
7074
7075  int j;
7076
7077  CLEAR_HARD_REG_SET (reg_reloaded_died);
7078
7079  for (j = 0; j < reload_n_operands; j++)
7080    input_reload_insns[j] = input_address_reload_insns[j]
7081      = inpaddr_address_reload_insns[j]
7082      = output_reload_insns[j] = output_address_reload_insns[j]
7083      = outaddr_address_reload_insns[j]
7084      = other_output_reload_insns[j] = 0;
7085  other_input_address_reload_insns = 0;
7086  other_input_reload_insns = 0;
7087  operand_reload_insns = 0;
7088  other_operand_reload_insns = 0;
7089
7090  /* Dump reloads into the dump file.  */
7091  if (dump_file)
7092    {
7093      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7094      debug_reload_to_stream (dump_file);
7095    }
7096
7097  /* Now output the instructions to copy the data into and out of the
7098     reload registers.  Do these in the order that the reloads were reported,
7099     since reloads of base and index registers precede reloads of operands
7100     and the operands may need the base and index registers reloaded.  */
7101
7102  for (j = 0; j < n_reloads; j++)
7103    {
7104      if (rld[j].reg_rtx
7105	  && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7106	new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7107
7108      do_input_reload (chain, rld + j, j);
7109      do_output_reload (chain, rld + j, j);
7110    }
7111
7112  /* Now write all the insns we made for reloads in the order expected by
7113     the allocation functions.  Prior to the insn being reloaded, we write
7114     the following reloads:
7115
7116     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7117
7118     RELOAD_OTHER reloads.
7119
7120     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7121     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7122     RELOAD_FOR_INPUT reload for the operand.
7123
7124     RELOAD_FOR_OPADDR_ADDRS reloads.
7125
7126     RELOAD_FOR_OPERAND_ADDRESS reloads.
7127
7128     After the insn being reloaded, we write the following:
7129
7130     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7131     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7132     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7133     reloads for the operand.  The RELOAD_OTHER output reloads are
7134     output in descending order by reload number.  */
7135
7136  emit_insn_before (other_input_address_reload_insns, insn);
7137  emit_insn_before (other_input_reload_insns, insn);
7138
7139  for (j = 0; j < reload_n_operands; j++)
7140    {
7141      emit_insn_before (inpaddr_address_reload_insns[j], insn);
7142      emit_insn_before (input_address_reload_insns[j], insn);
7143      emit_insn_before (input_reload_insns[j], insn);
7144    }
7145
7146  emit_insn_before (other_operand_reload_insns, insn);
7147  emit_insn_before (operand_reload_insns, insn);
7148
7149  for (j = 0; j < reload_n_operands; j++)
7150    {
7151      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7152      x = emit_insn_after (output_address_reload_insns[j], x);
7153      x = emit_insn_after (output_reload_insns[j], x);
7154      emit_insn_after (other_output_reload_insns[j], x);
7155    }
7156
7157  /* For all the spill regs newly reloaded in this instruction,
7158     record what they were reloaded from, so subsequent instructions
7159     can inherit the reloads.
7160
7161     Update spill_reg_store for the reloads of this insn.
7162     Copy the elements that were updated in the loop above.  */
7163
7164  for (j = 0; j < n_reloads; j++)
7165    {
7166      int r = reload_order[j];
7167      int i = reload_spill_index[r];
7168
7169      /* If this is a non-inherited input reload from a pseudo, we must
7170	 clear any memory of a previous store to the same pseudo.  Only do
7171	 something if there will not be an output reload for the pseudo
7172	 being reloaded.  */
7173      if (rld[r].in_reg != 0
7174	  && ! (reload_inherited[r] || reload_override_in[r]))
7175	{
7176	  rtx reg = rld[r].in_reg;
7177
7178	  if (GET_CODE (reg) == SUBREG)
7179	    reg = SUBREG_REG (reg);
7180
7181	  if (REG_P (reg)
7182	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7183	      && ! reg_has_output_reload[REGNO (reg)])
7184	    {
7185	      int nregno = REGNO (reg);
7186
7187	      if (reg_last_reload_reg[nregno])
7188		{
7189		  int last_regno = REGNO (reg_last_reload_reg[nregno]);
7190
7191		  if (reg_reloaded_contents[last_regno] == nregno)
7192		    spill_reg_store[last_regno] = 0;
7193		}
7194	    }
7195	}
7196
7197      /* I is nonneg if this reload used a register.
7198	 If rld[r].reg_rtx is 0, this is an optional reload
7199	 that we opted to ignore.  */
7200
7201      if (i >= 0 && rld[r].reg_rtx != 0)
7202	{
7203	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7204	  int k;
7205	  int part_reaches_end = 0;
7206	  int all_reaches_end = 1;
7207
7208	  /* For a multi register reload, we need to check if all or part
7209	     of the value lives to the end.  */
7210	  for (k = 0; k < nr; k++)
7211	    {
7212	      if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7213					    rld[r].when_needed))
7214		part_reaches_end = 1;
7215	      else
7216		all_reaches_end = 0;
7217	    }
7218
7219	  /* Ignore reloads that don't reach the end of the insn in
7220	     entirety.  */
7221	  if (all_reaches_end)
7222	    {
7223	      /* First, clear out memory of what used to be in this spill reg.
7224		 If consecutive registers are used, clear them all.  */
7225
7226	      for (k = 0; k < nr; k++)
7227  	        {
7228		CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7229  		  CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7230  		}
7231
7232	      /* Maybe the spill reg contains a copy of reload_out.  */
7233	      if (rld[r].out != 0
7234		  && (REG_P (rld[r].out)
7235#ifdef AUTO_INC_DEC
7236		      || ! rld[r].out_reg
7237#endif
7238		      || REG_P (rld[r].out_reg)))
7239		{
7240		  rtx out = (REG_P (rld[r].out)
7241			     ? rld[r].out
7242			     : rld[r].out_reg
7243			     ? rld[r].out_reg
7244/* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
7245		  int nregno = REGNO (out);
7246		  int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7247			     : hard_regno_nregs[nregno]
7248					       [GET_MODE (rld[r].reg_rtx)]);
7249		  bool piecemeal;
7250
7251		  spill_reg_store[i] = new_spill_reg_store[i];
7252		  spill_reg_stored_to[i] = out;
7253		  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7254
7255		  piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7256			       && nr == nnr
7257			       && inherit_piecemeal_p (r, nregno));
7258
7259		  /* If NREGNO is a hard register, it may occupy more than
7260		     one register.  If it does, say what is in the
7261		     rest of the registers assuming that both registers
7262		     agree on how many words the object takes.  If not,
7263		     invalidate the subsequent registers.  */
7264
7265		  if (nregno < FIRST_PSEUDO_REGISTER)
7266		    for (k = 1; k < nnr; k++)
7267		      reg_last_reload_reg[nregno + k]
7268			= (piecemeal
7269			   ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7270			   : 0);
7271
7272		  /* Now do the inverse operation.  */
7273		  for (k = 0; k < nr; k++)
7274		    {
7275		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7276		      reg_reloaded_contents[i + k]
7277			= (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7278			   ? nregno
7279			   : nregno + k);
7280		      reg_reloaded_insn[i + k] = insn;
7281		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7282		      if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7283			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7284		    }
7285		}
7286
7287	      /* Maybe the spill reg contains a copy of reload_in.  Only do
7288		 something if there will not be an output reload for
7289		 the register being reloaded.  */
7290	      else if (rld[r].out_reg == 0
7291		       && rld[r].in != 0
7292		       && ((REG_P (rld[r].in)
7293			    && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7294			    && ! reg_has_output_reload[REGNO (rld[r].in)])
7295			   || (REG_P (rld[r].in_reg)
7296			       && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7297		       && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7298		{
7299		  int nregno;
7300		  int nnr;
7301		  rtx in;
7302		  bool piecemeal;
7303
7304		  if (REG_P (rld[r].in)
7305		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7306		    in = rld[r].in;
7307		  else if (REG_P (rld[r].in_reg))
7308		    in = rld[r].in_reg;
7309		  else
7310		    in = XEXP (rld[r].in_reg, 0);
7311		  nregno = REGNO (in);
7312
7313		  nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7314			 : hard_regno_nregs[nregno]
7315					   [GET_MODE (rld[r].reg_rtx)]);
7316
7317		  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7318
7319		  piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7320			       && nr == nnr
7321			       && inherit_piecemeal_p (r, nregno));
7322
7323		  if (nregno < FIRST_PSEUDO_REGISTER)
7324		    for (k = 1; k < nnr; k++)
7325		      reg_last_reload_reg[nregno + k]
7326			= (piecemeal
7327			   ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7328			   : 0);
7329
7330		  /* Unless we inherited this reload, show we haven't
7331		     recently done a store.
7332		     Previous stores of inherited auto_inc expressions
7333		     also have to be discarded.  */
7334		  if (! reload_inherited[r]
7335		      || (rld[r].out && ! rld[r].out_reg))
7336		    spill_reg_store[i] = 0;
7337
7338		  for (k = 0; k < nr; k++)
7339		    {
7340		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7341		      reg_reloaded_contents[i + k]
7342			= (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
7343			   ? nregno
7344			   : nregno + k);
7345		      reg_reloaded_insn[i + k] = insn;
7346		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7347		      if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7348			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7349		    }
7350		}
7351	    }
7352
7353	  /* However, if part of the reload reaches the end, then we must
7354	     invalidate the old info for the part that survives to the end.  */
7355	  else if (part_reaches_end)
7356	    {
7357	      for (k = 0; k < nr; k++)
7358		if (reload_reg_reaches_end_p (i + k,
7359					      rld[r].opnum,
7360					      rld[r].when_needed))
7361		  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7362	    }
7363	}
7364
7365      /* The following if-statement was #if 0'd in 1.34 (or before...).
7366	 It's reenabled in 1.35 because supposedly nothing else
7367	 deals with this problem.  */
7368
7369      /* If a register gets output-reloaded from a non-spill register,
7370	 that invalidates any previous reloaded copy of it.
7371	 But forget_old_reloads_1 won't get to see it, because
7372	 it thinks only about the original insn.  So invalidate it here.  */
7373      if (i < 0 && rld[r].out != 0
7374	  && (REG_P (rld[r].out)
7375	      || (MEM_P (rld[r].out)
7376		  && REG_P (rld[r].out_reg))))
7377	{
7378	  rtx out = (REG_P (rld[r].out)
7379		     ? rld[r].out : rld[r].out_reg);
7380	  int nregno = REGNO (out);
7381	  if (nregno >= FIRST_PSEUDO_REGISTER)
7382	    {
7383	      rtx src_reg, store_insn = NULL_RTX;
7384
7385	      reg_last_reload_reg[nregno] = 0;
7386
7387	      /* If we can find a hard register that is stored, record
7388		 the storing insn so that we may delete this insn with
7389		 delete_output_reload.  */
7390	      src_reg = rld[r].reg_rtx;
7391
7392	      /* If this is an optional reload, try to find the source reg
7393		 from an input reload.  */
7394	      if (! src_reg)
7395		{
7396		  rtx set = single_set (insn);
7397		  if (set && SET_DEST (set) == rld[r].out)
7398		    {
7399		      int k;
7400
7401		      src_reg = SET_SRC (set);
7402		      store_insn = insn;
7403		      for (k = 0; k < n_reloads; k++)
7404			{
7405			  if (rld[k].in == src_reg)
7406			    {
7407			      src_reg = rld[k].reg_rtx;
7408			      break;
7409			    }
7410			}
7411		    }
7412		}
7413	      else
7414		store_insn = new_spill_reg_store[REGNO (src_reg)];
7415	      if (src_reg && REG_P (src_reg)
7416		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7417		{
7418		  int src_regno = REGNO (src_reg);
7419		  int nr = hard_regno_nregs[src_regno][rld[r].mode];
7420		  /* The place where to find a death note varies with
7421		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
7422		     necessarily checked exactly in the code that moves
7423		     notes, so just check both locations.  */
7424		  rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7425		  if (! note && store_insn)
7426		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
7427		  while (nr-- > 0)
7428		    {
7429		      spill_reg_store[src_regno + nr] = store_insn;
7430		      spill_reg_stored_to[src_regno + nr] = out;
7431		      reg_reloaded_contents[src_regno + nr] = nregno;
7432		      reg_reloaded_insn[src_regno + nr] = store_insn;
7433		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7434		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7435		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7436							  GET_MODE (src_reg)))
7437			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7438					  src_regno + nr);
7439		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7440		      if (note)
7441			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7442		      else
7443			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7444		    }
7445		  reg_last_reload_reg[nregno] = src_reg;
7446		  /* We have to set reg_has_output_reload here, or else
7447		     forget_old_reloads_1 will clear reg_last_reload_reg
7448		     right away.  */
7449		  reg_has_output_reload[nregno] = 1;
7450		}
7451	    }
7452	  else
7453	    {
7454	      int num_regs = hard_regno_nregs[nregno][GET_MODE (rld[r].out)];
7455
7456	      while (num_regs-- > 0)
7457		reg_last_reload_reg[nregno + num_regs] = 0;
7458	    }
7459	}
7460    }
7461  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7462}
7463
7464/* Go through the motions to emit INSN and test if it is strictly valid.
7465   Return the emitted insn if valid, else return NULL.  */
7466
7467static rtx
7468emit_insn_if_valid_for_reload (rtx insn)
7469{
7470  rtx last = get_last_insn ();
7471  int code;
7472
7473  insn = emit_insn (insn);
7474  code = recog_memoized (insn);
7475
7476  if (code >= 0)
7477    {
7478      extract_insn (insn);
7479      /* We want constrain operands to treat this insn strictly in its
7480	 validity determination, i.e., the way it would after reload has
7481	 completed.  */
7482      if (constrain_operands (1))
7483	return insn;
7484    }
7485
7486  delete_insns_since (last);
7487  return NULL;
7488}
7489
7490/* Emit code to perform a reload from IN (which may be a reload register) to
7491   OUT (which may also be a reload register).  IN or OUT is from operand
7492   OPNUM with reload type TYPE.
7493
7494   Returns first insn emitted.  */
7495
7496static rtx
7497gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
7498{
7499  rtx last = get_last_insn ();
7500  rtx tem;
7501
7502  /* If IN is a paradoxical SUBREG, remove it and try to put the
7503     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
7504  if (GET_CODE (in) == SUBREG
7505      && (GET_MODE_SIZE (GET_MODE (in))
7506	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7507      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7508    in = SUBREG_REG (in), out = tem;
7509  else if (GET_CODE (out) == SUBREG
7510	   && (GET_MODE_SIZE (GET_MODE (out))
7511	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7512	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7513    out = SUBREG_REG (out), in = tem;
7514
7515  /* How to do this reload can get quite tricky.  Normally, we are being
7516     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7517     register that didn't get a hard register.  In that case we can just
7518     call emit_move_insn.
7519
7520     We can also be asked to reload a PLUS that adds a register or a MEM to
7521     another register, constant or MEM.  This can occur during frame pointer
7522     elimination and while reloading addresses.  This case is handled by
7523     trying to emit a single insn to perform the add.  If it is not valid,
7524     we use a two insn sequence.
7525
7526     Or we can be asked to reload an unary operand that was a fragment of
7527     an addressing mode, into a register.  If it isn't recognized as-is,
7528     we try making the unop operand and the reload-register the same:
7529     (set reg:X (unop:X expr:Y))
7530     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
7531
7532     Finally, we could be called to handle an 'o' constraint by putting
7533     an address into a register.  In that case, we first try to do this
7534     with a named pattern of "reload_load_address".  If no such pattern
7535     exists, we just emit a SET insn and hope for the best (it will normally
7536     be valid on machines that use 'o').
7537
7538     This entire process is made complex because reload will never
7539     process the insns we generate here and so we must ensure that
7540     they will fit their constraints and also by the fact that parts of
7541     IN might be being reloaded separately and replaced with spill registers.
7542     Because of this, we are, in some sense, just guessing the right approach
7543     here.  The one listed above seems to work.
7544
7545     ??? At some point, this whole thing needs to be rethought.  */
7546
7547  if (GET_CODE (in) == PLUS
7548      && (REG_P (XEXP (in, 0))
7549	  || GET_CODE (XEXP (in, 0)) == SUBREG
7550	  || MEM_P (XEXP (in, 0)))
7551      && (REG_P (XEXP (in, 1))
7552	  || GET_CODE (XEXP (in, 1)) == SUBREG
7553	  || CONSTANT_P (XEXP (in, 1))
7554	  || MEM_P (XEXP (in, 1))))
7555    {
7556      /* We need to compute the sum of a register or a MEM and another
7557	 register, constant, or MEM, and put it into the reload
7558	 register.  The best possible way of doing this is if the machine
7559	 has a three-operand ADD insn that accepts the required operands.
7560
7561	 The simplest approach is to try to generate such an insn and see if it
7562	 is recognized and matches its constraints.  If so, it can be used.
7563
7564	 It might be better not to actually emit the insn unless it is valid,
7565	 but we need to pass the insn as an operand to `recog' and
7566	 `extract_insn' and it is simpler to emit and then delete the insn if
7567	 not valid than to dummy things up.  */
7568
7569      rtx op0, op1, tem, insn;
7570      int code;
7571
7572      op0 = find_replacement (&XEXP (in, 0));
7573      op1 = find_replacement (&XEXP (in, 1));
7574
7575      /* Since constraint checking is strict, commutativity won't be
7576	 checked, so we need to do that here to avoid spurious failure
7577	 if the add instruction is two-address and the second operand
7578	 of the add is the same as the reload reg, which is frequently
7579	 the case.  If the insn would be A = B + A, rearrange it so
7580	 it will be A = A + B as constrain_operands expects.  */
7581
7582      if (REG_P (XEXP (in, 1))
7583	  && REGNO (out) == REGNO (XEXP (in, 1)))
7584	tem = op0, op0 = op1, op1 = tem;
7585
7586      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7587	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7588
7589      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
7590      if (insn)
7591	return insn;
7592
7593      /* If that failed, we must use a conservative two-insn sequence.
7594
7595	 Use a move to copy one operand into the reload register.  Prefer
7596	 to reload a constant, MEM or pseudo since the move patterns can
7597	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
7598	 pseudo and OP1 is not a valid operand for an add instruction, then
7599	 reload OP1.
7600
7601	 After reloading one of the operands into the reload register, add
7602	 the reload register to the output register.
7603
7604	 If there is another way to do this for a specific machine, a
7605	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7606	 we emit below.  */
7607
7608      code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7609
7610      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
7611	  || (REG_P (op1)
7612	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7613	  || (code != CODE_FOR_nothing
7614	      && ! ((*insn_data[code].operand[2].predicate)
7615		    (op1, insn_data[code].operand[2].mode))))
7616	tem = op0, op0 = op1, op1 = tem;
7617
7618      gen_reload (out, op0, opnum, type);
7619
7620      /* If OP0 and OP1 are the same, we can use OUT for OP1.
7621	 This fixes a problem on the 32K where the stack pointer cannot
7622	 be used as an operand of an add insn.  */
7623
7624      if (rtx_equal_p (op0, op1))
7625	op1 = out;
7626
7627      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
7628      if (insn)
7629	{
7630	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
7631	  REG_NOTES (insn)
7632	    = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7633	  return insn;
7634	}
7635
7636      /* If that failed, copy the address register to the reload register.
7637	 Then add the constant to the reload register.  */
7638
7639      gen_reload (out, op1, opnum, type);
7640      insn = emit_insn (gen_add2_insn (out, op0));
7641      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7642    }
7643
7644#ifdef SECONDARY_MEMORY_NEEDED
7645  /* If we need a memory location to do the move, do it that way.  */
7646  else if ((REG_P (in) || GET_CODE (in) == SUBREG)
7647	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
7648	   && (REG_P (out) || GET_CODE (out) == SUBREG)
7649	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
7650	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
7651				       REGNO_REG_CLASS (reg_or_subregno (out)),
7652				       GET_MODE (out)))
7653    {
7654      /* Get the memory to use and rewrite both registers to its mode.  */
7655      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7656
7657      if (GET_MODE (loc) != GET_MODE (out))
7658	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7659
7660      if (GET_MODE (loc) != GET_MODE (in))
7661	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7662
7663      gen_reload (loc, in, opnum, type);
7664      gen_reload (out, loc, opnum, type);
7665    }
7666#endif
7667  else if (REG_P (out) && UNARY_P (in))
7668    {
7669      rtx insn;
7670      rtx op1;
7671      rtx out_moded;
7672      rtx set;
7673
7674      op1 = find_replacement (&XEXP (in, 0));
7675      if (op1 != XEXP (in, 0))
7676	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
7677
7678      /* First, try a plain SET.  */
7679      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
7680      if (set)
7681	return set;
7682
7683      /* If that failed, move the inner operand to the reload
7684	 register, and try the same unop with the inner expression
7685	 replaced with the reload register.  */
7686
7687      if (GET_MODE (op1) != GET_MODE (out))
7688	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
7689      else
7690	out_moded = out;
7691
7692      gen_reload (out_moded, op1, opnum, type);
7693
7694      insn
7695	= gen_rtx_SET (VOIDmode, out,
7696		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
7697				      out_moded));
7698      insn = emit_insn_if_valid_for_reload (insn);
7699      if (insn)
7700	{
7701	  REG_NOTES (insn)
7702	    = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7703	  return insn;
7704	}
7705
7706      fatal_insn ("Failure trying to reload:", set);
7707    }
7708  /* If IN is a simple operand, use gen_move_insn.  */
7709  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
7710    emit_insn (gen_move_insn (out, in));
7711
7712#ifdef HAVE_reload_load_address
7713  else if (HAVE_reload_load_address)
7714    emit_insn (gen_reload_load_address (out, in));
7715#endif
7716
7717  /* Otherwise, just write (set OUT IN) and hope for the best.  */
7718  else
7719    emit_insn (gen_rtx_SET (VOIDmode, out, in));
7720
7721  /* Return the first insn emitted.
7722     We can not just return get_last_insn, because there may have
7723     been multiple instructions emitted.  Also note that gen_move_insn may
7724     emit more than one insn itself, so we can not assume that there is one
7725     insn emitted per emit_insn_before call.  */
7726
7727  return last ? NEXT_INSN (last) : get_insns ();
7728}
7729
7730/* Delete a previously made output-reload whose result we now believe
7731   is not needed.  First we double-check.
7732
7733   INSN is the insn now being processed.
7734   LAST_RELOAD_REG is the hard register number for which we want to delete
7735   the last output reload.
7736   J is the reload-number that originally used REG.  The caller has made
7737   certain that reload J doesn't use REG any longer for input.  */
7738
7739static void
7740delete_output_reload (rtx insn, int j, int last_reload_reg)
7741{
7742  rtx output_reload_insn = spill_reg_store[last_reload_reg];
7743  rtx reg = spill_reg_stored_to[last_reload_reg];
7744  int k;
7745  int n_occurrences;
7746  int n_inherited = 0;
7747  rtx i1;
7748  rtx substed;
7749
7750  /* It is possible that this reload has been only used to set another reload
7751     we eliminated earlier and thus deleted this instruction too.  */
7752  if (INSN_DELETED_P (output_reload_insn))
7753    return;
7754
7755  /* Get the raw pseudo-register referred to.  */
7756
7757  while (GET_CODE (reg) == SUBREG)
7758    reg = SUBREG_REG (reg);
7759  substed = reg_equiv_memory_loc[REGNO (reg)];
7760
7761  /* This is unsafe if the operand occurs more often in the current
7762     insn than it is inherited.  */
7763  for (k = n_reloads - 1; k >= 0; k--)
7764    {
7765      rtx reg2 = rld[k].in;
7766      if (! reg2)
7767	continue;
7768      if (MEM_P (reg2) || reload_override_in[k])
7769	reg2 = rld[k].in_reg;
7770#ifdef AUTO_INC_DEC
7771      if (rld[k].out && ! rld[k].out_reg)
7772	reg2 = XEXP (rld[k].in_reg, 0);
7773#endif
7774      while (GET_CODE (reg2) == SUBREG)
7775	reg2 = SUBREG_REG (reg2);
7776      if (rtx_equal_p (reg2, reg))
7777	{
7778	  if (reload_inherited[k] || reload_override_in[k] || k == j)
7779	    {
7780	      n_inherited++;
7781	      reg2 = rld[k].out_reg;
7782	      if (! reg2)
7783		continue;
7784	      while (GET_CODE (reg2) == SUBREG)
7785		reg2 = XEXP (reg2, 0);
7786	      if (rtx_equal_p (reg2, reg))
7787		n_inherited++;
7788	    }
7789	  else
7790	    return;
7791	}
7792    }
7793  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
7794  if (substed)
7795    n_occurrences += count_occurrences (PATTERN (insn),
7796					eliminate_regs (substed, 0,
7797							NULL_RTX), 0);
7798  if (n_occurrences > n_inherited)
7799    return;
7800
7801  /* If the pseudo-reg we are reloading is no longer referenced
7802     anywhere between the store into it and here,
7803     and we're within the same basic block, then the value can only
7804     pass through the reload reg and end up here.
7805     Otherwise, give up--return.  */
7806  for (i1 = NEXT_INSN (output_reload_insn);
7807       i1 != insn; i1 = NEXT_INSN (i1))
7808    {
7809      if (NOTE_INSN_BASIC_BLOCK_P (i1))
7810	return;
7811      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
7812	  && reg_mentioned_p (reg, PATTERN (i1)))
7813	{
7814	  /* If this is USE in front of INSN, we only have to check that
7815	     there are no more references than accounted for by inheritance.  */
7816	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
7817	    {
7818	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7819	      i1 = NEXT_INSN (i1);
7820	    }
7821	  if (n_occurrences <= n_inherited && i1 == insn)
7822	    break;
7823	  return;
7824	}
7825    }
7826
7827  /* We will be deleting the insn.  Remove the spill reg information.  */
7828  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
7829    {
7830      spill_reg_store[last_reload_reg + k] = 0;
7831      spill_reg_stored_to[last_reload_reg + k] = 0;
7832    }
7833
7834  /* The caller has already checked that REG dies or is set in INSN.
7835     It has also checked that we are optimizing, and thus some
7836     inaccuracies in the debugging information are acceptable.
7837     So we could just delete output_reload_insn.  But in some cases
7838     we can improve the debugging information without sacrificing
7839     optimization - maybe even improving the code: See if the pseudo
7840     reg has been completely replaced with reload regs.  If so, delete
7841     the store insn and forget we had a stack slot for the pseudo.  */
7842  if (rld[j].out != rld[j].in
7843      && REG_N_DEATHS (REGNO (reg)) == 1
7844      && REG_N_SETS (REGNO (reg)) == 1
7845      && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7846      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7847    {
7848      rtx i2;
7849
7850      /* We know that it was used only between here and the beginning of
7851	 the current basic block.  (We also know that the last use before
7852	 INSN was the output reload we are thinking of deleting, but never
7853	 mind that.)  Search that range; see if any ref remains.  */
7854      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7855	{
7856	  rtx set = single_set (i2);
7857
7858	  /* Uses which just store in the pseudo don't count,
7859	     since if they are the only uses, they are dead.  */
7860	  if (set != 0 && SET_DEST (set) == reg)
7861	    continue;
7862	  if (LABEL_P (i2)
7863	      || JUMP_P (i2))
7864	    break;
7865	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
7866	      && reg_mentioned_p (reg, PATTERN (i2)))
7867	    {
7868	      /* Some other ref remains; just delete the output reload we
7869		 know to be dead.  */
7870	      delete_address_reloads (output_reload_insn, insn);
7871	      delete_insn (output_reload_insn);
7872	      return;
7873	    }
7874	}
7875
7876      /* Delete the now-dead stores into this pseudo.  Note that this
7877	 loop also takes care of deleting output_reload_insn.  */
7878      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7879	{
7880	  rtx set = single_set (i2);
7881
7882	  if (set != 0 && SET_DEST (set) == reg)
7883	    {
7884	      delete_address_reloads (i2, insn);
7885	      delete_insn (i2);
7886	    }
7887	  if (LABEL_P (i2)
7888	      || JUMP_P (i2))
7889	    break;
7890	}
7891
7892      /* For the debugging info, say the pseudo lives in this reload reg.  */
7893      reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
7894      alter_reg (REGNO (reg), -1);
7895    }
7896  else
7897    {
7898      delete_address_reloads (output_reload_insn, insn);
7899      delete_insn (output_reload_insn);
7900    }
7901}
7902
7903/* We are going to delete DEAD_INSN.  Recursively delete loads of
7904   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7905   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
7906static void
7907delete_address_reloads (rtx dead_insn, rtx current_insn)
7908{
7909  rtx set = single_set (dead_insn);
7910  rtx set2, dst, prev, next;
7911  if (set)
7912    {
7913      rtx dst = SET_DEST (set);
7914      if (MEM_P (dst))
7915	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7916    }
7917  /* If we deleted the store from a reloaded post_{in,de}c expression,
7918     we can delete the matching adds.  */
7919  prev = PREV_INSN (dead_insn);
7920  next = NEXT_INSN (dead_insn);
7921  if (! prev || ! next)
7922    return;
7923  set = single_set (next);
7924  set2 = single_set (prev);
7925  if (! set || ! set2
7926      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7927      || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7928      || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7929    return;
7930  dst = SET_DEST (set);
7931  if (! rtx_equal_p (dst, SET_DEST (set2))
7932      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7933      || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7934      || (INTVAL (XEXP (SET_SRC (set), 1))
7935	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
7936    return;
7937  delete_related_insns (prev);
7938  delete_related_insns (next);
7939}
7940
7941/* Subfunction of delete_address_reloads: process registers found in X.  */
7942static void
7943delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
7944{
7945  rtx prev, set, dst, i2;
7946  int i, j;
7947  enum rtx_code code = GET_CODE (x);
7948
7949  if (code != REG)
7950    {
7951      const char *fmt = GET_RTX_FORMAT (code);
7952      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7953	{
7954	  if (fmt[i] == 'e')
7955	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
7956	  else if (fmt[i] == 'E')
7957	    {
7958	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7959		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
7960					  current_insn);
7961	    }
7962	}
7963      return;
7964    }
7965
7966  if (spill_reg_order[REGNO (x)] < 0)
7967    return;
7968
7969  /* Scan backwards for the insn that sets x.  This might be a way back due
7970     to inheritance.  */
7971  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
7972    {
7973      code = GET_CODE (prev);
7974      if (code == CODE_LABEL || code == JUMP_INSN)
7975	return;
7976      if (!INSN_P (prev))
7977	continue;
7978      if (reg_set_p (x, PATTERN (prev)))
7979	break;
7980      if (reg_referenced_p (x, PATTERN (prev)))
7981	return;
7982    }
7983  if (! prev || INSN_UID (prev) < reload_first_uid)
7984    return;
7985  /* Check that PREV only sets the reload register.  */
7986  set = single_set (prev);
7987  if (! set)
7988    return;
7989  dst = SET_DEST (set);
7990  if (!REG_P (dst)
7991      || ! rtx_equal_p (dst, x))
7992    return;
7993  if (! reg_set_p (dst, PATTERN (dead_insn)))
7994    {
7995      /* Check if DST was used in a later insn -
7996	 it might have been inherited.  */
7997      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
7998	{
7999	  if (LABEL_P (i2))
8000	    break;
8001	  if (! INSN_P (i2))
8002	    continue;
8003	  if (reg_referenced_p (dst, PATTERN (i2)))
8004	    {
8005	      /* If there is a reference to the register in the current insn,
8006		 it might be loaded in a non-inherited reload.  If no other
8007		 reload uses it, that means the register is set before
8008		 referenced.  */
8009	      if (i2 == current_insn)
8010		{
8011		  for (j = n_reloads - 1; j >= 0; j--)
8012		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
8013			|| reload_override_in[j] == dst)
8014		      return;
8015		  for (j = n_reloads - 1; j >= 0; j--)
8016		    if (rld[j].in && rld[j].reg_rtx == dst)
8017		      break;
8018		  if (j >= 0)
8019		    break;
8020		}
8021	      return;
8022	    }
8023	  if (JUMP_P (i2))
8024	    break;
8025	  /* If DST is still live at CURRENT_INSN, check if it is used for
8026	     any reload.  Note that even if CURRENT_INSN sets DST, we still
8027	     have to check the reloads.  */
8028	  if (i2 == current_insn)
8029	    {
8030	      for (j = n_reloads - 1; j >= 0; j--)
8031		if ((rld[j].reg_rtx == dst && reload_inherited[j])
8032		    || reload_override_in[j] == dst)
8033		  return;
8034	      /* ??? We can't finish the loop here, because dst might be
8035		 allocated to a pseudo in this block if no reload in this
8036		 block needs any of the classes containing DST - see
8037		 spill_hard_reg.  There is no easy way to tell this, so we
8038		 have to scan till the end of the basic block.  */
8039	    }
8040	  if (reg_set_p (dst, PATTERN (i2)))
8041	    break;
8042	}
8043    }
8044  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8045  reg_reloaded_contents[REGNO (dst)] = -1;
8046  delete_insn (prev);
8047}
8048
8049/* Output reload-insns to reload VALUE into RELOADREG.
8050   VALUE is an autoincrement or autodecrement RTX whose operand
8051   is a register or memory location;
8052   so reloading involves incrementing that location.
8053   IN is either identical to VALUE, or some cheaper place to reload from.
8054
8055   INC_AMOUNT is the number to increment or decrement by (always positive).
8056   This cannot be deduced from VALUE.
8057
8058   Return the instruction that stores into RELOADREG.  */
8059
8060static rtx
8061inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8062{
8063  /* REG or MEM to be copied and incremented.  */
8064  rtx incloc = XEXP (value, 0);
8065  /* Nonzero if increment after copying.  */
8066  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
8067  rtx last;
8068  rtx inc;
8069  rtx add_insn;
8070  int code;
8071  rtx store;
8072  rtx real_in = in == value ? XEXP (in, 0) : in;
8073
8074  /* No hard register is equivalent to this register after
8075     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
8076     we could inc/dec that register as well (maybe even using it for
8077     the source), but I'm not sure it's worth worrying about.  */
8078  if (REG_P (incloc))
8079    reg_last_reload_reg[REGNO (incloc)] = 0;
8080
8081  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8082    inc_amount = -inc_amount;
8083
8084  inc = GEN_INT (inc_amount);
8085
8086  /* If this is post-increment, first copy the location to the reload reg.  */
8087  if (post && real_in != reloadreg)
8088    emit_insn (gen_move_insn (reloadreg, real_in));
8089
8090  if (in == value)
8091    {
8092      /* See if we can directly increment INCLOC.  Use a method similar to
8093	 that in gen_reload.  */
8094
8095      last = get_last_insn ();
8096      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8097					 gen_rtx_PLUS (GET_MODE (incloc),
8098						       incloc, inc)));
8099
8100      code = recog_memoized (add_insn);
8101      if (code >= 0)
8102	{
8103	  extract_insn (add_insn);
8104	  if (constrain_operands (1))
8105	    {
8106	      /* If this is a pre-increment and we have incremented the value
8107		 where it lives, copy the incremented value to RELOADREG to
8108		 be used as an address.  */
8109
8110	      if (! post)
8111		emit_insn (gen_move_insn (reloadreg, incloc));
8112
8113	      return add_insn;
8114	    }
8115	}
8116      delete_insns_since (last);
8117    }
8118
8119  /* If couldn't do the increment directly, must increment in RELOADREG.
8120     The way we do this depends on whether this is pre- or post-increment.
8121     For pre-increment, copy INCLOC to the reload register, increment it
8122     there, then save back.  */
8123
8124  if (! post)
8125    {
8126      if (in != reloadreg)
8127	emit_insn (gen_move_insn (reloadreg, real_in));
8128      emit_insn (gen_add2_insn (reloadreg, inc));
8129      store = emit_insn (gen_move_insn (incloc, reloadreg));
8130    }
8131  else
8132    {
8133      /* Postincrement.
8134	 Because this might be a jump insn or a compare, and because RELOADREG
8135	 may not be available after the insn in an input reload, we must do
8136	 the incrementation before the insn being reloaded for.
8137
8138	 We have already copied IN to RELOADREG.  Increment the copy in
8139	 RELOADREG, save that back, then decrement RELOADREG so it has
8140	 the original value.  */
8141
8142      emit_insn (gen_add2_insn (reloadreg, inc));
8143      store = emit_insn (gen_move_insn (incloc, reloadreg));
8144      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
8145    }
8146
8147  return store;
8148}
8149
8150#ifdef AUTO_INC_DEC
8151static void
8152add_auto_inc_notes (rtx insn, rtx x)
8153{
8154  enum rtx_code code = GET_CODE (x);
8155  const char *fmt;
8156  int i, j;
8157
8158  if (code == MEM && auto_inc_p (XEXP (x, 0)))
8159    {
8160      REG_NOTES (insn)
8161	= gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
8162      return;
8163    }
8164
8165  /* Scan all the operand sub-expressions.  */
8166  fmt = GET_RTX_FORMAT (code);
8167  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8168    {
8169      if (fmt[i] == 'e')
8170	add_auto_inc_notes (insn, XEXP (x, i));
8171      else if (fmt[i] == 'E')
8172	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8173	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
8174    }
8175}
8176#endif
8177
8178/* Copy EH notes from an insn to its reloads.  */
8179static void
8180copy_eh_notes (rtx insn, rtx x)
8181{
8182  rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8183  if (eh_note)
8184    {
8185      for (; x != 0; x = NEXT_INSN (x))
8186	{
8187	  if (may_trap_p (PATTERN (x)))
8188	    REG_NOTES (x)
8189	      = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8190				   REG_NOTES (x));
8191	}
8192    }
8193}
8194
8195/* This is used by reload pass, that does emit some instructions after
8196   abnormal calls moving basic block end, but in fact it wants to emit
8197   them on the edge.  Looks for abnormal call edges, find backward the
8198   proper call and fix the damage.
8199
8200   Similar handle instructions throwing exceptions internally.  */
8201void
8202fixup_abnormal_edges (void)
8203{
8204  bool inserted = false;
8205  basic_block bb;
8206
8207  FOR_EACH_BB (bb)
8208    {
8209      edge e;
8210      edge_iterator ei;
8211
8212      /* Look for cases we are interested in - calls or instructions causing
8213         exceptions.  */
8214      FOR_EACH_EDGE (e, ei, bb->succs)
8215	{
8216	  if (e->flags & EDGE_ABNORMAL_CALL)
8217	    break;
8218	  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8219	      == (EDGE_ABNORMAL | EDGE_EH))
8220	    break;
8221	}
8222      if (e && !CALL_P (BB_END (bb))
8223	  && !can_throw_internal (BB_END (bb)))
8224	{
8225	  rtx insn;
8226
8227	  /* Get past the new insns generated.  Allow notes, as the insns
8228	     may be already deleted.  */
8229	  insn = BB_END (bb);
8230	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8231		 && !can_throw_internal (insn)
8232		 && insn != BB_HEAD (bb))
8233	    insn = PREV_INSN (insn);
8234
8235	  if (CALL_P (insn) || can_throw_internal (insn))
8236	    {
8237	      rtx stop, next;
8238
8239	      stop = NEXT_INSN (BB_END (bb));
8240	      BB_END (bb) = insn;
8241	      insn = NEXT_INSN (insn);
8242
8243	      FOR_EACH_EDGE (e, ei, bb->succs)
8244		if (e->flags & EDGE_FALLTHRU)
8245		  break;
8246
8247	      while (insn && insn != stop)
8248		{
8249		  next = NEXT_INSN (insn);
8250		  if (INSN_P (insn))
8251		    {
8252	              delete_insn (insn);
8253
8254		      /* Sometimes there's still the return value USE.
8255			 If it's placed after a trapping call (i.e. that
8256			 call is the last insn anyway), we have no fallthru
8257			 edge.  Simply delete this use and don't try to insert
8258			 on the non-existent edge.  */
8259		      if (GET_CODE (PATTERN (insn)) != USE)
8260			{
8261			  /* We're not deleting it, we're moving it.  */
8262			  INSN_DELETED_P (insn) = 0;
8263			  PREV_INSN (insn) = NULL_RTX;
8264			  NEXT_INSN (insn) = NULL_RTX;
8265
8266			  insert_insn_on_edge (insn, e);
8267			  inserted = true;
8268			}
8269		    }
8270		  insn = next;
8271		}
8272	    }
8273
8274	  /* It may be that we don't find any such trapping insn.  In this
8275	     case we discovered quite late that the insn that had been
8276	     marked as can_throw_internal in fact couldn't trap at all.
8277	     So we should in fact delete the EH edges out of the block.  */
8278	  else
8279	    purge_dead_edges (bb);
8280	}
8281    }
8282
8283  /* We've possibly turned single trapping insn into multiple ones.  */
8284  if (flag_non_call_exceptions)
8285    {
8286      sbitmap blocks;
8287      blocks = sbitmap_alloc (last_basic_block);
8288      sbitmap_ones (blocks);
8289      find_many_sub_basic_blocks (blocks);
8290    }
8291
8292  if (inserted)
8293    commit_edge_insertions ();
8294
8295#ifdef ENABLE_CHECKING
8296  /* Verify that we didn't turn one trapping insn into many, and that
8297     we found and corrected all of the problems wrt fixups on the
8298     fallthru edge.  */
8299  verify_flow_info ();
8300#endif
8301}
8302