1/* Reload pseudo regs into hard regs for insns that require hard regs.
2   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4   2011 Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26
27#include "machmode.h"
28#include "hard-reg-set.h"
29#include "rtl.h"
30#include "tm_p.h"
31#include "obstack.h"
32#include "insn-config.h"
33#include "flags.h"
34#include "function.h"
35#include "expr.h"
36#include "optabs.h"
37#include "regs.h"
38#include "addresses.h"
39#include "basic-block.h"
40#include "reload.h"
41#include "recog.h"
42#include "output.h"
43#include "real.h"
44#include "toplev.h"
45#include "except.h"
46#include "tree.h"
47#include "ira.h"
48#include "df.h"
49#include "target.h"
50#include "emit-rtl.h"
51
52/* This file contains the reload pass of the compiler, which is
53   run after register allocation has been done.  It checks that
54   each insn is valid (operands required to be in registers really
55   are in registers of the proper class) and fixes up invalid ones
56   by copying values temporarily into registers for the insns
57   that need them.
58
59   The results of register allocation are described by the vector
60   reg_renumber; the insns still contain pseudo regs, but reg_renumber
61   can be used to find which hard reg, if any, a pseudo reg is in.
62
63   The technique we always use is to free up a few hard regs that are
64   called ``reload regs'', and for each place where a pseudo reg
65   must be in a hard reg, copy it temporarily into one of the reload regs.
66
67   Reload regs are allocated locally for every instruction that needs
68   reloads.  When there are pseudos which are allocated to a register that
69   has been chosen as a reload reg, such pseudos must be ``spilled''.
70   This means that they go to other hard regs, or to stack slots if no other
71   available hard regs can be found.  Spilling can invalidate more
72   insns, requiring additional need for reloads, so we must keep checking
73   until the process stabilizes.
74
75   For machines with different classes of registers, we must keep track
76   of the register class needed for each reload, and make sure that
77   we allocate enough reload registers of each class.
78
79   The file reload.c contains the code that checks one insn for
80   validity and reports the reloads that it needs.  This file
81   is in charge of scanning the entire rtl code, accumulating the
82   reload needs, spilling, assigning reload registers to use for
83   fixing up each insn, and generating the new insns to copy values
84   into the reload registers.  */
85
86/* During reload_as_needed, element N contains a REG rtx for the hard reg
87   into which reg N has been reloaded (perhaps for a previous insn).  */
88static rtx *reg_last_reload_reg;
89
90/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
91   for an output reload that stores into reg N.  */
92static regset_head reg_has_output_reload;
93
94/* Indicates which hard regs are reload-registers for an output reload
95   in the current insn.  */
96static HARD_REG_SET reg_is_output_reload;
97
98/* Element N is the constant value to which pseudo reg N is equivalent,
99   or zero if pseudo reg N is not equivalent to a constant.
100   find_reloads looks at this in order to replace pseudo reg N
101   with the constant it stands for.  */
102rtx *reg_equiv_constant;
103
104/* Element N is an invariant value to which pseudo reg N is equivalent.
105   eliminate_regs_in_insn uses this to replace pseudos in particular
106   contexts.  */
107rtx *reg_equiv_invariant;
108
109/* Element N is a memory location to which pseudo reg N is equivalent,
110   prior to any register elimination (such as frame pointer to stack
111   pointer).  Depending on whether or not it is a valid address, this value
112   is transferred to either reg_equiv_address or reg_equiv_mem.  */
113rtx *reg_equiv_memory_loc;
114
115/* We allocate reg_equiv_memory_loc inside a varray so that the garbage
116   collector can keep track of what is inside.  */
117VEC(rtx,gc) *reg_equiv_memory_loc_vec;
118
119/* Element N is the address of stack slot to which pseudo reg N is equivalent.
120   This is used when the address is not valid as a memory address
121   (because its displacement is too big for the machine.)  */
122rtx *reg_equiv_address;
123
124/* Element N is the memory slot to which pseudo reg N is equivalent,
125   or zero if pseudo reg N is not equivalent to a memory slot.  */
126rtx *reg_equiv_mem;
127
128/* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
129   alternate representations of the location of pseudo reg N.  */
130rtx *reg_equiv_alt_mem_list;
131
132/* Widest width in which each pseudo reg is referred to (via subreg).  */
133static unsigned int *reg_max_ref_width;
134
135/* Element N is the list of insns that initialized reg N from its equivalent
136   constant or memory slot.  */
137rtx *reg_equiv_init;
138int reg_equiv_init_size;
139
140/* Vector to remember old contents of reg_renumber before spilling.  */
141static short *reg_old_renumber;
142
143/* During reload_as_needed, element N contains the last pseudo regno reloaded
144   into hard register N.  If that pseudo reg occupied more than one register,
145   reg_reloaded_contents points to that pseudo for each spill register in
146   use; all of these must remain set for an inheritance to occur.  */
147static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
148
149/* During reload_as_needed, element N contains the insn for which
150   hard register N was last used.   Its contents are significant only
151   when reg_reloaded_valid is set for this register.  */
152static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
153
154/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
155static HARD_REG_SET reg_reloaded_valid;
156/* Indicate if the register was dead at the end of the reload.
157   This is only valid if reg_reloaded_contents is set and valid.  */
158static HARD_REG_SET reg_reloaded_dead;
159
160/* Indicate whether the register's current value is one that is not
161   safe to retain across a call, even for registers that are normally
162   call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
163static HARD_REG_SET reg_reloaded_call_part_clobbered;
164
165/* Number of spill-regs so far; number of valid elements of spill_regs.  */
166static int n_spills;
167
168/* In parallel with spill_regs, contains REG rtx's for those regs.
169   Holds the last rtx used for any given reg, or 0 if it has never
170   been used for spilling yet.  This rtx is reused, provided it has
171   the proper mode.  */
172static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
173
174/* In parallel with spill_regs, contains nonzero for a spill reg
175   that was stored after the last time it was used.
176   The precise value is the insn generated to do the store.  */
177static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
178
179/* This is the register that was stored with spill_reg_store.  This is a
180   copy of reload_out / reload_out_reg when the value was stored; if
181   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
182static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
183
184/* This table is the inverse mapping of spill_regs:
185   indexed by hard reg number,
186   it contains the position of that reg in spill_regs,
187   or -1 for something that is not in spill_regs.
188
189   ?!?  This is no longer accurate.  */
190static short spill_reg_order[FIRST_PSEUDO_REGISTER];
191
192/* This reg set indicates registers that can't be used as spill registers for
193   the currently processed insn.  These are the hard registers which are live
194   during the insn, but not allocated to pseudos, as well as fixed
195   registers.  */
196static HARD_REG_SET bad_spill_regs;
197
198/* These are the hard registers that can't be used as spill register for any
199   insn.  This includes registers used for user variables and registers that
200   we can't eliminate.  A register that appears in this set also can't be used
201   to retry register allocation.  */
202static HARD_REG_SET bad_spill_regs_global;
203
204/* Describes order of use of registers for reloading
205   of spilled pseudo-registers.  `n_spills' is the number of
206   elements that are actually valid; new ones are added at the end.
207
208   Both spill_regs and spill_reg_order are used on two occasions:
209   once during find_reload_regs, where they keep track of the spill registers
210   for a single insn, but also during reload_as_needed where they show all
211   the registers ever used by reload.  For the latter case, the information
212   is calculated during finish_spills.  */
213static short spill_regs[FIRST_PSEUDO_REGISTER];
214
215/* This vector of reg sets indicates, for each pseudo, which hard registers
216   may not be used for retrying global allocation because the register was
217   formerly spilled from one of them.  If we allowed reallocating a pseudo to
218   a register that it was already allocated to, reload might not
219   terminate.  */
220static HARD_REG_SET *pseudo_previous_regs;
221
222/* This vector of reg sets indicates, for each pseudo, which hard
223   registers may not be used for retrying global allocation because they
224   are used as spill registers during one of the insns in which the
225   pseudo is live.  */
226static HARD_REG_SET *pseudo_forbidden_regs;
227
228/* All hard regs that have been used as spill registers for any insn are
229   marked in this set.  */
230static HARD_REG_SET used_spill_regs;
231
232/* Index of last register assigned as a spill register.  We allocate in
233   a round-robin fashion.  */
234static int last_spill_reg;
235
236/* Nonzero if indirect addressing is supported on the machine; this means
237   that spilling (REG n) does not require reloading it into a register in
238   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
239   value indicates the level of indirect addressing supported, e.g., two
240   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241   a hard register.  */
242static char spill_indirect_levels;
243
244/* Nonzero if indirect addressing is supported when the innermost MEM is
245   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
246   which these are valid is the same as spill_indirect_levels, above.  */
247char indirect_symref_ok;
248
249/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
250char double_reg_address_ok;
251
252/* Record the stack slot for each spilled hard register.  */
253static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
254
255/* Width allocated so far for that stack slot.  */
256static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
257
258/* Record which pseudos needed to be spilled.  */
259static regset_head spilled_pseudos;
260
261/* Record which pseudos changed their allocation in finish_spills.  */
262static regset_head changed_allocation_pseudos;
263
264/* Used for communication between order_regs_for_reload and count_pseudo.
265   Used to avoid counting one pseudo twice.  */
266static regset_head pseudos_counted;
267
268/* First uid used by insns created by reload in this function.
269   Used in find_equiv_reg.  */
270int reload_first_uid;
271
272/* Flag set by local-alloc or global-alloc if anything is live in
273   a call-clobbered reg across calls.  */
274int caller_save_needed;
275
276/* Set to 1 while reload_as_needed is operating.
277   Required by some machines to handle any generated moves differently.  */
278int reload_in_progress = 0;
279
280/* These arrays record the insn_code of insns that may be needed to
281   perform input and output reloads of special objects.  They provide a
282   place to pass a scratch register.  */
283enum insn_code reload_in_optab[NUM_MACHINE_MODES];
284enum insn_code reload_out_optab[NUM_MACHINE_MODES];
285
286/* This obstack is used for allocation of rtl during register elimination.
287   The allocated storage can be freed once find_reloads has processed the
288   insn.  */
289static struct obstack reload_obstack;
290
291/* Points to the beginning of the reload_obstack.  All insn_chain structures
292   are allocated first.  */
293static char *reload_startobj;
294
295/* The point after all insn_chain structures.  Used to quickly deallocate
296   memory allocated in copy_reloads during calculate_needs_all_insns.  */
297static char *reload_firstobj;
298
299/* This points before all local rtl generated by register elimination.
300   Used to quickly free all memory after processing one insn.  */
301static char *reload_insn_firstobj;
302
303/* List of insn_chain instructions, one for every insn that reload needs to
304   examine.  */
305struct insn_chain *reload_insn_chain;
306
307/* List of all insns needing reloads.  */
308static struct insn_chain *insns_need_reload;
309
310/* This structure is used to record information about register eliminations.
311   Each array entry describes one possible way of eliminating a register
312   in favor of another.   If there is more than one way of eliminating a
313   particular register, the most preferred should be specified first.  */
314
315struct elim_table
316{
317  int from;			/* Register number to be eliminated.  */
318  int to;			/* Register number used as replacement.  */
319  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
320  int can_eliminate;		/* Nonzero if this elimination can be done.  */
321  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
322				   target hook in previous scan over insns
323				   made by reload.  */
324  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
325  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
326  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
327  rtx from_rtx;			/* REG rtx for the register to be eliminated.
328				   We cannot simply compare the number since
329				   we might then spuriously replace a hard
330				   register corresponding to a pseudo
331				   assigned to the reg to be eliminated.  */
332  rtx to_rtx;			/* REG rtx for the replacement.  */
333};
334
335static struct elim_table *reg_eliminate = 0;
336
337/* This is an intermediate structure to initialize the table.  It has
338   exactly the members provided by ELIMINABLE_REGS.  */
339static const struct elim_table_1
340{
341  const int from;
342  const int to;
343} reg_eliminate_1[] =
344
345/* If a set of eliminable registers was specified, define the table from it.
346   Otherwise, default to the normal case of the frame pointer being
347   replaced by the stack pointer.  */
348
349#ifdef ELIMINABLE_REGS
350  ELIMINABLE_REGS;
351#else
352  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
353#endif
354
355#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
356
357/* Record the number of pending eliminations that have an offset not equal
358   to their initial offset.  If nonzero, we use a new copy of each
359   replacement result in any insns encountered.  */
360int num_not_at_initial_offset;
361
362/* Count the number of registers that we may be able to eliminate.  */
363static int num_eliminable;
364/* And the number of registers that are equivalent to a constant that
365   can be eliminated to frame_pointer / arg_pointer + constant.  */
366static int num_eliminable_invariants;
367
368/* For each label, we record the offset of each elimination.  If we reach
369   a label by more than one path and an offset differs, we cannot do the
370   elimination.  This information is indexed by the difference of the
371   number of the label and the first label number.  We can't offset the
372   pointer itself as this can cause problems on machines with segmented
373   memory.  The first table is an array of flags that records whether we
374   have yet encountered a label and the second table is an array of arrays,
375   one entry in the latter array for each elimination.  */
376
377static int first_label_num;
378static char *offsets_known_at;
379static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
380
381/* Stack of addresses where an rtx has been changed.  We can undo the
382   changes by popping items off the stack and restoring the original
383   value at each location.
384
385   We use this simplistic undo capability rather than copy_rtx as copy_rtx
386   will not make a deep copy of a normally sharable rtx, such as
387   (const (plus (symbol_ref) (const_int))).  If such an expression appears
388   as R1 in gen_reload_chain_without_interm_reg_p, then a shared
389   rtx expression would be changed.  See PR 42431.  */
390
391typedef rtx *rtx_p;
392DEF_VEC_P(rtx_p);
393DEF_VEC_ALLOC_P(rtx_p,heap);
394static VEC(rtx_p,heap) *substitute_stack;
395
396/* Number of labels in the current function.  */
397
398static int num_labels;
399
400static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
401static void maybe_fix_stack_asms (void);
402static void copy_reloads (struct insn_chain *);
403static void calculate_needs_all_insns (int);
404static int find_reg (struct insn_chain *, int);
405static void find_reload_regs (struct insn_chain *);
406static void select_reload_regs (void);
407static void delete_caller_save_insns (void);
408
409static void spill_failure (rtx, enum reg_class);
410static void count_spilled_pseudo (int, int, int);
411static void delete_dead_insn (rtx);
412static void alter_reg (int, int, bool);
413static void set_label_offsets (rtx, rtx, int);
414static void check_eliminable_occurrences (rtx);
415static void elimination_effects (rtx, enum machine_mode);
416static int eliminate_regs_in_insn (rtx, int);
417static void update_eliminable_offsets (void);
418static void mark_not_eliminable (rtx, const_rtx, void *);
419static void set_initial_elim_offsets (void);
420static bool verify_initial_elim_offsets (void);
421static void set_initial_label_offsets (void);
422static void set_offsets_for_label (rtx);
423static void init_elim_table (void);
424static void update_eliminables (HARD_REG_SET *);
425static void spill_hard_reg (unsigned int, int);
426static int finish_spills (int);
427static void scan_paradoxical_subregs (rtx);
428static void count_pseudo (int);
429static void order_regs_for_reload (struct insn_chain *);
430static void reload_as_needed (int);
431static void forget_old_reloads_1 (rtx, const_rtx, void *);
432static void forget_marked_reloads (regset);
433static int reload_reg_class_lower (const void *, const void *);
434static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
435				    enum machine_mode);
436static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
437				     enum machine_mode);
438static int reload_reg_free_p (unsigned int, int, enum reload_type);
439static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
440					rtx, rtx, int, int);
441static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
442			     rtx, rtx, int, int);
443static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
444static int allocate_reload_reg (struct insn_chain *, int, int);
445static int conflicts_with_override (rtx);
446static void failed_reload (rtx, int);
447static int set_reload_reg (int, int);
448static void choose_reload_regs_init (struct insn_chain *, rtx *);
449static void choose_reload_regs (struct insn_chain *);
450static void emit_input_reload_insns (struct insn_chain *, struct reload *,
451				     rtx, int);
452static void emit_output_reload_insns (struct insn_chain *, struct reload *,
453				      int);
454static void do_input_reload (struct insn_chain *, struct reload *, int);
455static void do_output_reload (struct insn_chain *, struct reload *, int);
456static void emit_reload_insns (struct insn_chain *);
457static void delete_output_reload (rtx, int, int, rtx);
458static void delete_address_reloads (rtx, rtx);
459static void delete_address_reloads_1 (rtx, rtx, rtx);
460static void inc_for_reload (rtx, rtx, rtx, int);
461#ifdef AUTO_INC_DEC
462static void add_auto_inc_notes (rtx, rtx);
463#endif
464static void substitute (rtx *, const_rtx, rtx);
465static bool gen_reload_chain_without_interm_reg_p (int, int);
466static int reloads_conflict (int, int);
467static rtx gen_reload (rtx, rtx, int, enum reload_type);
468static rtx emit_insn_if_valid_for_reload (rtx);
469
470/* Initialize the reload pass.  This is called at the beginning of compilation
471   and may be called again if the target is reinitialized.  */
472
473void
474init_reload (void)
475{
476  int i;
477
478  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
479     Set spill_indirect_levels to the number of levels such addressing is
480     permitted, zero if it is not permitted at all.  */
481
482  rtx tem
483    = gen_rtx_MEM (Pmode,
484		   gen_rtx_PLUS (Pmode,
485				 gen_rtx_REG (Pmode,
486					      LAST_VIRTUAL_REGISTER + 1),
487				 GEN_INT (4)));
488  spill_indirect_levels = 0;
489
490  while (memory_address_p (QImode, tem))
491    {
492      spill_indirect_levels++;
493      tem = gen_rtx_MEM (Pmode, tem);
494    }
495
496  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
497
498  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
499  indirect_symref_ok = memory_address_p (QImode, tem);
500
501  /* See if reg+reg is a valid (and offsettable) address.  */
502
503  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
504    {
505      tem = gen_rtx_PLUS (Pmode,
506			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
507			  gen_rtx_REG (Pmode, i));
508
509      /* This way, we make sure that reg+reg is an offsettable address.  */
510      tem = plus_constant (tem, 4);
511
512      if (memory_address_p (QImode, tem))
513	{
514	  double_reg_address_ok = 1;
515	  break;
516	}
517    }
518
519  /* Initialize obstack for our rtl allocation.  */
520  gcc_obstack_init (&reload_obstack);
521  reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
522
523  INIT_REG_SET (&spilled_pseudos);
524  INIT_REG_SET (&changed_allocation_pseudos);
525  INIT_REG_SET (&pseudos_counted);
526}
527
528/* List of insn chains that are currently unused.  */
529static struct insn_chain *unused_insn_chains = 0;
530
531/* Allocate an empty insn_chain structure.  */
532struct insn_chain *
533new_insn_chain (void)
534{
535  struct insn_chain *c;
536
537  if (unused_insn_chains == 0)
538    {
539      c = XOBNEW (&reload_obstack, struct insn_chain);
540      INIT_REG_SET (&c->live_throughout);
541      INIT_REG_SET (&c->dead_or_set);
542    }
543  else
544    {
545      c = unused_insn_chains;
546      unused_insn_chains = c->next;
547    }
548  c->is_caller_save_insn = 0;
549  c->need_operand_change = 0;
550  c->need_reload = 0;
551  c->need_elim = 0;
552  return c;
553}
554
555/* Small utility function to set all regs in hard reg set TO which are
556   allocated to pseudos in regset FROM.  */
557
558void
559compute_use_by_pseudos (HARD_REG_SET *to, regset from)
560{
561  unsigned int regno;
562  reg_set_iterator rsi;
563
564  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
565    {
566      int r = reg_renumber[regno];
567
568      if (r < 0)
569	{
570	  /* reload_combine uses the information from DF_LIVE_IN,
571	     which might still contain registers that have not
572	     actually been allocated since they have an
573	     equivalence.  */
574	  gcc_assert (ira_conflicts_p || reload_completed);
575	}
576      else
577	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
578    }
579}
580
581/* Replace all pseudos found in LOC with their corresponding
582   equivalences.  */
583
584static void
585replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
586{
587  rtx x = *loc;
588  enum rtx_code code;
589  const char *fmt;
590  int i, j;
591
592  if (! x)
593    return;
594
595  code = GET_CODE (x);
596  if (code == REG)
597    {
598      unsigned int regno = REGNO (x);
599
600      if (regno < FIRST_PSEUDO_REGISTER)
601	return;
602
603      x = eliminate_regs (x, mem_mode, usage);
604      if (x != *loc)
605	{
606	  *loc = x;
607	  replace_pseudos_in (loc, mem_mode, usage);
608	  return;
609	}
610
611      if (reg_equiv_constant[regno])
612	*loc = reg_equiv_constant[regno];
613      else if (reg_equiv_mem[regno])
614	*loc = reg_equiv_mem[regno];
615      else if (reg_equiv_address[regno])
616	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
617      else
618	{
619	  gcc_assert (!REG_P (regno_reg_rtx[regno])
620		      || REGNO (regno_reg_rtx[regno]) != regno);
621	  *loc = regno_reg_rtx[regno];
622	}
623
624      return;
625    }
626  else if (code == MEM)
627    {
628      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
629      return;
630    }
631
632  /* Process each of our operands recursively.  */
633  fmt = GET_RTX_FORMAT (code);
634  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
635    if (*fmt == 'e')
636      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
637    else if (*fmt == 'E')
638      for (j = 0; j < XVECLEN (x, i); j++)
639	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
640}
641
642/* Determine if the current function has an exception receiver block
643   that reaches the exit block via non-exceptional edges  */
644
645static bool
646has_nonexceptional_receiver (void)
647{
648  edge e;
649  edge_iterator ei;
650  basic_block *tos, *worklist, bb;
651
652  /* If we're not optimizing, then just err on the safe side.  */
653  if (!optimize)
654    return true;
655
656  /* First determine which blocks can reach exit via normal paths.  */
657  tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
658
659  FOR_EACH_BB (bb)
660    bb->flags &= ~BB_REACHABLE;
661
662  /* Place the exit block on our worklist.  */
663  EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
664  *tos++ = EXIT_BLOCK_PTR;
665
666  /* Iterate: find everything reachable from what we've already seen.  */
667  while (tos != worklist)
668    {
669      bb = *--tos;
670
671      FOR_EACH_EDGE (e, ei, bb->preds)
672	if (!(e->flags & EDGE_ABNORMAL))
673	  {
674	    basic_block src = e->src;
675
676	    if (!(src->flags & BB_REACHABLE))
677	      {
678		src->flags |= BB_REACHABLE;
679		*tos++ = src;
680	      }
681	  }
682    }
683  free (worklist);
684
685  /* Now see if there's a reachable block with an exceptional incoming
686     edge.  */
687  FOR_EACH_BB (bb)
688    if (bb->flags & BB_REACHABLE)
689      FOR_EACH_EDGE (e, ei, bb->preds)
690	if (e->flags & EDGE_ABNORMAL)
691	  return true;
692
693  /* No exceptional block reached exit unexceptionally.  */
694  return false;
695}
696
697
698/* Global variables used by reload and its subroutines.  */
699
700/* Set during calculate_needs if an insn needs register elimination.  */
701static int something_needs_elimination;
702/* Set during calculate_needs if an insn needs an operand changed.  */
703static int something_needs_operands_changed;
704
705/* Nonzero means we couldn't get enough spill regs.  */
706static int failure;
707
708/* Temporary array of pseudo-register number.  */
709static int *temp_pseudo_reg_arr;
710
711/* Main entry point for the reload pass.
712
713   FIRST is the first insn of the function being compiled.
714
715   GLOBAL nonzero means we were called from global_alloc
716   and should attempt to reallocate any pseudoregs that we
717   displace from hard regs we will use for reloads.
718   If GLOBAL is zero, we do not have enough information to do that,
719   so any pseudo reg that is spilled must go to the stack.
720
721   Return value is nonzero if reload failed
722   and we must not do any more for this function.  */
723
724int
725reload (rtx first, int global)
726{
727  int i, n;
728  rtx insn;
729  struct elim_table *ep;
730  basic_block bb;
731
732  /* Make sure even insns with volatile mem refs are recognizable.  */
733  init_recog ();
734
735  failure = 0;
736
737  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
738
739  /* Make sure that the last insn in the chain
740     is not something that needs reloading.  */
741  emit_note (NOTE_INSN_DELETED);
742
743  /* Enable find_equiv_reg to distinguish insns made by reload.  */
744  reload_first_uid = get_max_uid ();
745
746#ifdef SECONDARY_MEMORY_NEEDED
747  /* Initialize the secondary memory table.  */
748  clear_secondary_mem ();
749#endif
750
751  /* We don't have a stack slot for any spill reg yet.  */
752  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
753  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
754
755  /* Initialize the save area information for caller-save, in case some
756     are needed.  */
757  init_save_areas ();
758
759  /* Compute which hard registers are now in use
760     as homes for pseudo registers.
761     This is done here rather than (eg) in global_alloc
762     because this point is reached even if not optimizing.  */
763  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
764    mark_home_live (i);
765
766  /* A function that has a nonlocal label that can reach the exit
767     block via non-exceptional paths must save all call-saved
768     registers.  */
769  if (cfun->has_nonlocal_label
770      && has_nonexceptional_receiver ())
771    crtl->saves_all_registers = 1;
772
773  if (crtl->saves_all_registers)
774    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
775      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
776	df_set_regs_ever_live (i, true);
777
778  /* Find all the pseudo registers that didn't get hard regs
779     but do have known equivalent constants or memory slots.
780     These include parameters (known equivalent to parameter slots)
781     and cse'd or loop-moved constant memory addresses.
782
783     Record constant equivalents in reg_equiv_constant
784     so they will be substituted by find_reloads.
785     Record memory equivalents in reg_mem_equiv so they can
786     be substituted eventually by altering the REG-rtx's.  */
787
788  reg_equiv_constant = XCNEWVEC (rtx, max_regno);
789  reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
790  reg_equiv_mem = XCNEWVEC (rtx, max_regno);
791  reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
792  reg_equiv_address = XCNEWVEC (rtx, max_regno);
793  reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
794  reg_old_renumber = XCNEWVEC (short, max_regno);
795  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
796  pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
797  pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
798
799  CLEAR_HARD_REG_SET (bad_spill_regs_global);
800
801  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
802     to.  Also find all paradoxical subregs and find largest such for
803     each pseudo.  */
804
805  num_eliminable_invariants = 0;
806  for (insn = first; insn; insn = NEXT_INSN (insn))
807    {
808      rtx set = single_set (insn);
809
810      /* We may introduce USEs that we want to remove at the end, so
811	 we'll mark them with QImode.  Make sure there are no
812	 previously-marked insns left by say regmove.  */
813      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
814	  && GET_MODE (insn) != VOIDmode)
815	PUT_MODE (insn, VOIDmode);
816
817      if (NONDEBUG_INSN_P (insn))
818	scan_paradoxical_subregs (PATTERN (insn));
819
820      if (set != 0 && REG_P (SET_DEST (set)))
821	{
822	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
823	  rtx x;
824
825	  if (! note)
826	    continue;
827
828	  i = REGNO (SET_DEST (set));
829	  x = XEXP (note, 0);
830
831	  if (i <= LAST_VIRTUAL_REGISTER)
832	    continue;
833
834	  if (! function_invariant_p (x)
835	      || ! flag_pic
836	      /* A function invariant is often CONSTANT_P but may
837		 include a register.  We promise to only pass
838		 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P.  */
839	      || (CONSTANT_P (x)
840		  && LEGITIMATE_PIC_OPERAND_P (x)))
841	    {
842	      /* It can happen that a REG_EQUIV note contains a MEM
843		 that is not a legitimate memory operand.  As later
844		 stages of reload assume that all addresses found
845		 in the reg_equiv_* arrays were originally legitimate,
846		 we ignore such REG_EQUIV notes.  */
847	      if (memory_operand (x, VOIDmode))
848		{
849		  /* Always unshare the equivalence, so we can
850		     substitute into this insn without touching the
851		       equivalence.  */
852		  reg_equiv_memory_loc[i] = copy_rtx (x);
853		}
854	      else if (function_invariant_p (x))
855		{
856		  if (GET_CODE (x) == PLUS)
857		    {
858		      /* This is PLUS of frame pointer and a constant,
859			 and might be shared.  Unshare it.  */
860		      reg_equiv_invariant[i] = copy_rtx (x);
861		      num_eliminable_invariants++;
862		    }
863		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
864		    {
865		      reg_equiv_invariant[i] = x;
866		      num_eliminable_invariants++;
867		    }
868		  else if (LEGITIMATE_CONSTANT_P (x))
869		    reg_equiv_constant[i] = x;
870		  else
871		    {
872		      reg_equiv_memory_loc[i]
873			= force_const_mem (GET_MODE (SET_DEST (set)), x);
874		      if (! reg_equiv_memory_loc[i])
875			reg_equiv_init[i] = NULL_RTX;
876		    }
877		}
878	      else
879		{
880		  reg_equiv_init[i] = NULL_RTX;
881		  continue;
882		}
883	    }
884	  else
885	    reg_equiv_init[i] = NULL_RTX;
886	}
887    }
888
889  if (dump_file)
890    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
891      if (reg_equiv_init[i])
892	{
893	  fprintf (dump_file, "init_insns for %u: ", i);
894	  print_inline_rtx (dump_file, reg_equiv_init[i], 20);
895	  fprintf (dump_file, "\n");
896	}
897
898  init_elim_table ();
899
900  first_label_num = get_first_label_num ();
901  num_labels = max_label_num () - first_label_num;
902
903  /* Allocate the tables used to store offset information at labels.  */
904  /* We used to use alloca here, but the size of what it would try to
905     allocate would occasionally cause it to exceed the stack limit and
906     cause a core dump.  */
907  offsets_known_at = XNEWVEC (char, num_labels);
908  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
909
910  /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
911     stack slots to the pseudos that lack hard regs or equivalents.
912     Do not touch virtual registers.  */
913
914  temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
915  for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
916    temp_pseudo_reg_arr[n++] = i;
917
918  if (ira_conflicts_p)
919    /* Ask IRA to order pseudo-registers for better stack slot
920       sharing.  */
921    ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
922
923  for (i = 0; i < n; i++)
924    alter_reg (temp_pseudo_reg_arr[i], -1, false);
925
926  /* If we have some registers we think can be eliminated, scan all insns to
927     see if there is an insn that sets one of these registers to something
928     other than itself plus a constant.  If so, the register cannot be
929     eliminated.  Doing this scan here eliminates an extra pass through the
930     main reload loop in the most common case where register elimination
931     cannot be done.  */
932  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
933    if (INSN_P (insn))
934      note_stores (PATTERN (insn), mark_not_eliminable, NULL);
935
936  maybe_fix_stack_asms ();
937
938  insns_need_reload = 0;
939  something_needs_elimination = 0;
940
941  /* Initialize to -1, which means take the first spill register.  */
942  last_spill_reg = -1;
943
944  /* Spill any hard regs that we know we can't eliminate.  */
945  CLEAR_HARD_REG_SET (used_spill_regs);
946  /* There can be multiple ways to eliminate a register;
947     they should be listed adjacently.
948     Elimination for any register fails only if all possible ways fail.  */
949  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
950    {
951      int from = ep->from;
952      int can_eliminate = 0;
953      do
954	{
955          can_eliminate |= ep->can_eliminate;
956          ep++;
957	}
958      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
959      if (! can_eliminate)
960	spill_hard_reg (from, 1);
961    }
962
963#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
964  if (frame_pointer_needed)
965    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
966#endif
967  finish_spills (global);
968
969  /* From now on, we may need to generate moves differently.  We may also
970     allow modifications of insns which cause them to not be recognized.
971     Any such modifications will be cleaned up during reload itself.  */
972  reload_in_progress = 1;
973
974  /* This loop scans the entire function each go-round
975     and repeats until one repetition spills no additional hard regs.  */
976  for (;;)
977    {
978      int something_changed;
979      int did_spill;
980      HOST_WIDE_INT starting_frame_size;
981
982      starting_frame_size = get_frame_size ();
983
984      set_initial_elim_offsets ();
985      set_initial_label_offsets ();
986
987      /* For each pseudo register that has an equivalent location defined,
988	 try to eliminate any eliminable registers (such as the frame pointer)
989	 assuming initial offsets for the replacement register, which
990	 is the normal case.
991
992	 If the resulting location is directly addressable, substitute
993	 the MEM we just got directly for the old REG.
994
995	 If it is not addressable but is a constant or the sum of a hard reg
996	 and constant, it is probably not addressable because the constant is
997	 out of range, in that case record the address; we will generate
998	 hairy code to compute the address in a register each time it is
999	 needed.  Similarly if it is a hard register, but one that is not
1000	 valid as an address register.
1001
1002	 If the location is not addressable, but does not have one of the
1003	 above forms, assign a stack slot.  We have to do this to avoid the
1004	 potential of producing lots of reloads if, e.g., a location involves
1005	 a pseudo that didn't get a hard register and has an equivalent memory
1006	 location that also involves a pseudo that didn't get a hard register.
1007
1008	 Perhaps at some point we will improve reload_when_needed handling
1009	 so this problem goes away.  But that's very hairy.  */
1010
1011      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1012	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
1013	  {
1014	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
1015				    NULL_RTX);
1016
1017	    if (strict_memory_address_addr_space_p
1018		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
1019		   MEM_ADDR_SPACE (x)))
1020	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
1021	    else if (CONSTANT_P (XEXP (x, 0))
1022		     || (REG_P (XEXP (x, 0))
1023			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
1024		     || (GET_CODE (XEXP (x, 0)) == PLUS
1025			 && REG_P (XEXP (XEXP (x, 0), 0))
1026			 && (REGNO (XEXP (XEXP (x, 0), 0))
1027			     < FIRST_PSEUDO_REGISTER)
1028			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
1029	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
1030	    else
1031	      {
1032		/* Make a new stack slot.  Then indicate that something
1033		   changed so we go back and recompute offsets for
1034		   eliminable registers because the allocation of memory
1035		   below might change some offset.  reg_equiv_{mem,address}
1036		   will be set up for this pseudo on the next pass around
1037		   the loop.  */
1038		reg_equiv_memory_loc[i] = 0;
1039		reg_equiv_init[i] = 0;
1040		alter_reg (i, -1, true);
1041	      }
1042	  }
1043
1044      if (caller_save_needed)
1045	setup_save_areas ();
1046
1047      /* If we allocated another stack slot, redo elimination bookkeeping.  */
1048      if (starting_frame_size != get_frame_size ())
1049	continue;
1050      if (starting_frame_size && crtl->stack_alignment_needed)
1051	{
1052	  /* If we have a stack frame, we must align it now.  The
1053	     stack size may be a part of the offset computation for
1054	     register elimination.  So if this changes the stack size,
1055	     then repeat the elimination bookkeeping.  We don't
1056	     realign when there is no stack, as that will cause a
1057	     stack frame when none is needed should
1058	     STARTING_FRAME_OFFSET not be already aligned to
1059	     STACK_BOUNDARY.  */
1060	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
1061	  if (starting_frame_size != get_frame_size ())
1062	    continue;
1063	}
1064
1065      if (caller_save_needed)
1066	{
1067	  save_call_clobbered_regs ();
1068	  /* That might have allocated new insn_chain structures.  */
1069	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1070	}
1071
1072      calculate_needs_all_insns (global);
1073
1074      if (! ira_conflicts_p)
1075	/* Don't do it for IRA.  We need this info because we don't
1076	   change live_throughout and dead_or_set for chains when IRA
1077	   is used.  */
1078	CLEAR_REG_SET (&spilled_pseudos);
1079
1080      did_spill = 0;
1081
1082      something_changed = 0;
1083
1084      /* If we allocated any new memory locations, make another pass
1085	 since it might have changed elimination offsets.  */
1086      if (starting_frame_size != get_frame_size ())
1087	something_changed = 1;
1088
1089      /* Even if the frame size remained the same, we might still have
1090	 changed elimination offsets, e.g. if find_reloads called
1091	 force_const_mem requiring the back end to allocate a constant
1092	 pool base register that needs to be saved on the stack.  */
1093      else if (!verify_initial_elim_offsets ())
1094	something_changed = 1;
1095
1096      {
1097	HARD_REG_SET to_spill;
1098	CLEAR_HARD_REG_SET (to_spill);
1099	update_eliminables (&to_spill);
1100	AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
1101
1102	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1103	  if (TEST_HARD_REG_BIT (to_spill, i))
1104	    {
1105	      spill_hard_reg (i, 1);
1106	      did_spill = 1;
1107
1108	      /* Regardless of the state of spills, if we previously had
1109		 a register that we thought we could eliminate, but now can
1110		 not eliminate, we must run another pass.
1111
1112		 Consider pseudos which have an entry in reg_equiv_* which
1113		 reference an eliminable register.  We must make another pass
1114		 to update reg_equiv_* so that we do not substitute in the
1115		 old value from when we thought the elimination could be
1116		 performed.  */
1117	      something_changed = 1;
1118	    }
1119      }
1120
1121      select_reload_regs ();
1122      if (failure)
1123	goto failed;
1124
1125      if (insns_need_reload != 0 || did_spill)
1126	something_changed |= finish_spills (global);
1127
1128      if (! something_changed)
1129	break;
1130
1131      if (caller_save_needed)
1132	delete_caller_save_insns ();
1133
1134      obstack_free (&reload_obstack, reload_firstobj);
1135    }
1136
1137  /* If global-alloc was run, notify it of any register eliminations we have
1138     done.  */
1139  if (global)
1140    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1141      if (ep->can_eliminate)
1142	mark_elimination (ep->from, ep->to);
1143
1144  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1145     If that insn didn't set the register (i.e., it copied the register to
1146     memory), just delete that insn instead of the equivalencing insn plus
1147     anything now dead.  If we call delete_dead_insn on that insn, we may
1148     delete the insn that actually sets the register if the register dies
1149     there and that is incorrect.  */
1150
1151  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1152    {
1153      if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1154	{
1155	  rtx list;
1156	  for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1157	    {
1158	      rtx equiv_insn = XEXP (list, 0);
1159
1160	      /* If we already deleted the insn or if it may trap, we can't
1161		 delete it.  The latter case shouldn't happen, but can
1162		 if an insn has a variable address, gets a REG_EH_REGION
1163		 note added to it, and then gets converted into a load
1164		 from a constant address.  */
1165	      if (NOTE_P (equiv_insn)
1166		  || can_throw_internal (equiv_insn))
1167		;
1168	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1169		delete_dead_insn (equiv_insn);
1170	      else
1171		SET_INSN_DELETED (equiv_insn);
1172	    }
1173	}
1174    }
1175
1176  /* Use the reload registers where necessary
1177     by generating move instructions to move the must-be-register
1178     values into or out of the reload registers.  */
1179
1180  if (insns_need_reload != 0 || something_needs_elimination
1181      || something_needs_operands_changed)
1182    {
1183      HOST_WIDE_INT old_frame_size = get_frame_size ();
1184
1185      reload_as_needed (global);
1186
1187      gcc_assert (old_frame_size == get_frame_size ());
1188
1189      gcc_assert (verify_initial_elim_offsets ());
1190    }
1191
1192  /* If we were able to eliminate the frame pointer, show that it is no
1193     longer live at the start of any basic block.  If it ls live by
1194     virtue of being in a pseudo, that pseudo will be marked live
1195     and hence the frame pointer will be known to be live via that
1196     pseudo.  */
1197
1198  if (! frame_pointer_needed)
1199    FOR_EACH_BB (bb)
1200      bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1201
1202  /* Come here (with failure set nonzero) if we can't get enough spill
1203     regs.  */
1204 failed:
1205
1206  CLEAR_REG_SET (&changed_allocation_pseudos);
1207  CLEAR_REG_SET (&spilled_pseudos);
1208  reload_in_progress = 0;
1209
1210  /* Now eliminate all pseudo regs by modifying them into
1211     their equivalent memory references.
1212     The REG-rtx's for the pseudos are modified in place,
1213     so all insns that used to refer to them now refer to memory.
1214
1215     For a reg that has a reg_equiv_address, all those insns
1216     were changed by reloading so that no insns refer to it any longer;
1217     but the DECL_RTL of a variable decl may refer to it,
1218     and if so this causes the debugging info to mention the variable.  */
1219
1220  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1221    {
1222      rtx addr = 0;
1223
1224      if (reg_equiv_mem[i])
1225	addr = XEXP (reg_equiv_mem[i], 0);
1226
1227      if (reg_equiv_address[i])
1228	addr = reg_equiv_address[i];
1229
1230      if (addr)
1231	{
1232	  if (reg_renumber[i] < 0)
1233	    {
1234	      rtx reg = regno_reg_rtx[i];
1235
1236	      REG_USERVAR_P (reg) = 0;
1237	      PUT_CODE (reg, MEM);
1238	      XEXP (reg, 0) = addr;
1239	      if (reg_equiv_memory_loc[i])
1240		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1241	      else
1242		{
1243		  MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1244		  MEM_ATTRS (reg) = 0;
1245		}
1246	      MEM_NOTRAP_P (reg) = 1;
1247	    }
1248	  else if (reg_equiv_mem[i])
1249	    XEXP (reg_equiv_mem[i], 0) = addr;
1250	}
1251
1252      /* We don't want complex addressing modes in debug insns
1253	 if simpler ones will do, so delegitimize equivalences
1254	 in debug insns.  */
1255      if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1256	{
1257	  rtx reg = regno_reg_rtx[i];
1258	  rtx equiv = 0;
1259	  df_ref use, next;
1260
1261	  if (reg_equiv_constant[i])
1262	    equiv = reg_equiv_constant[i];
1263	  else if (reg_equiv_invariant[i])
1264	    equiv = reg_equiv_invariant[i];
1265	  else if (reg && MEM_P (reg))
1266	    equiv = targetm.delegitimize_address (reg);
1267	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1268	    equiv = reg;
1269
1270	  if (equiv == reg)
1271	    continue;
1272
1273	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
1274	    {
1275	      insn = DF_REF_INSN (use);
1276
1277	      /* Make sure the next ref is for a different instruction,
1278		 so that we're not affected by the rescan.  */
1279	      next = DF_REF_NEXT_REG (use);
1280	      while (next && DF_REF_INSN (next) == insn)
1281		next = DF_REF_NEXT_REG (next);
1282
1283	      if (DEBUG_INSN_P (insn))
1284		{
1285		  if (!equiv)
1286		    {
1287		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1288		      df_insn_rescan_debug_internal (insn);
1289		    }
1290		  else
1291		    INSN_VAR_LOCATION_LOC (insn)
1292		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1293					      reg, equiv);
1294		}
1295	    }
1296	}
1297    }
1298
1299  /* We must set reload_completed now since the cleanup_subreg_operands call
1300     below will re-recognize each insn and reload may have generated insns
1301     which are only valid during and after reload.  */
1302  reload_completed = 1;
1303
1304  /* Make a pass over all the insns and delete all USEs which we inserted
1305     only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1306     notes.  Delete all CLOBBER insns, except those that refer to the return
1307     value and the special mem:BLK CLOBBERs added to prevent the scheduler
1308     from misarranging variable-array code, and simplify (subreg (reg))
1309     operands.  Strip and regenerate REG_INC notes that may have been moved
1310     around.  */
1311
1312  for (insn = first; insn; insn = NEXT_INSN (insn))
1313    if (INSN_P (insn))
1314      {
1315	rtx *pnote;
1316
1317	if (CALL_P (insn))
1318	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1319			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1320
1321	if ((GET_CODE (PATTERN (insn)) == USE
1322	     /* We mark with QImode USEs introduced by reload itself.  */
1323	     && (GET_MODE (insn) == QImode
1324		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1325	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1326		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1327		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1328		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1329			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1330				!= stack_pointer_rtx))
1331		&& (!REG_P (XEXP (PATTERN (insn), 0))
1332		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1333	  {
1334	    delete_insn (insn);
1335	    continue;
1336	  }
1337
1338	/* Some CLOBBERs may survive until here and still reference unassigned
1339	   pseudos with const equivalent, which may in turn cause ICE in later
1340	   passes if the reference remains in place.  */
1341	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1342	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1343			      VOIDmode, PATTERN (insn));
1344
1345	/* Discard obvious no-ops, even without -O.  This optimization
1346	   is fast and doesn't interfere with debugging.  */
1347	if (NONJUMP_INSN_P (insn)
1348	    && GET_CODE (PATTERN (insn)) == SET
1349	    && REG_P (SET_SRC (PATTERN (insn)))
1350	    && REG_P (SET_DEST (PATTERN (insn)))
1351	    && (REGNO (SET_SRC (PATTERN (insn)))
1352		== REGNO (SET_DEST (PATTERN (insn)))))
1353	  {
1354	    delete_insn (insn);
1355	    continue;
1356	  }
1357
1358	pnote = &REG_NOTES (insn);
1359	while (*pnote != 0)
1360	  {
1361	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1362		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1363		|| REG_NOTE_KIND (*pnote) == REG_INC)
1364	      *pnote = XEXP (*pnote, 1);
1365	    else
1366	      pnote = &XEXP (*pnote, 1);
1367	  }
1368
1369#ifdef AUTO_INC_DEC
1370	add_auto_inc_notes (insn, PATTERN (insn));
1371#endif
1372
1373	/* Simplify (subreg (reg)) if it appears as an operand.  */
1374	cleanup_subreg_operands (insn);
1375
1376	/* Clean up invalid ASMs so that they don't confuse later passes.
1377	   See PR 21299.  */
1378	if (asm_noperands (PATTERN (insn)) >= 0)
1379	  {
1380	    extract_insn (insn);
1381	    if (!constrain_operands (1))
1382	      {
1383		error_for_asm (insn,
1384			       "%<asm%> operand has impossible constraints");
1385		delete_insn (insn);
1386		continue;
1387	      }
1388	  }
1389      }
1390
1391  /* If we are doing generic stack checking, give a warning if this
1392     function's frame size is larger than we expect.  */
1393  if (flag_stack_check == GENERIC_STACK_CHECK)
1394    {
1395      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1396      static int verbose_warned = 0;
1397
1398      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1399	if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1400	  size += UNITS_PER_WORD;
1401
1402      if (size > STACK_CHECK_MAX_FRAME_SIZE)
1403	{
1404	  warning (0, "frame size too large for reliable stack checking");
1405	  if (! verbose_warned)
1406	    {
1407	      warning (0, "try reducing the number of local variables");
1408	      verbose_warned = 1;
1409	    }
1410	}
1411    }
1412
1413  /* Indicate that we no longer have known memory locations or constants.  */
1414  if (reg_equiv_constant)
1415    free (reg_equiv_constant);
1416  if (reg_equiv_invariant)
1417    free (reg_equiv_invariant);
1418  reg_equiv_constant = 0;
1419  reg_equiv_invariant = 0;
1420  VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
1421  reg_equiv_memory_loc = 0;
1422
1423  free (temp_pseudo_reg_arr);
1424
1425  if (offsets_known_at)
1426    free (offsets_known_at);
1427  if (offsets_at)
1428    free (offsets_at);
1429
1430  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1431    if (reg_equiv_alt_mem_list[i])
1432      free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
1433  free (reg_equiv_alt_mem_list);
1434
1435  free (reg_equiv_mem);
1436  reg_equiv_init = 0;
1437  free (reg_equiv_address);
1438  free (reg_max_ref_width);
1439  free (reg_old_renumber);
1440  free (pseudo_previous_regs);
1441  free (pseudo_forbidden_regs);
1442
1443  CLEAR_HARD_REG_SET (used_spill_regs);
1444  for (i = 0; i < n_spills; i++)
1445    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1446
1447  /* Free all the insn_chain structures at once.  */
1448  obstack_free (&reload_obstack, reload_startobj);
1449  unused_insn_chains = 0;
1450  fixup_abnormal_edges ();
1451
1452  /* Replacing pseudos with their memory equivalents might have
1453     created shared rtx.  Subsequent passes would get confused
1454     by this, so unshare everything here.  */
1455  unshare_all_rtl_again (first);
1456
1457#ifdef STACK_BOUNDARY
1458  /* init_emit has set the alignment of the hard frame pointer
1459     to STACK_BOUNDARY.  It is very likely no longer valid if
1460     the hard frame pointer was used for register allocation.  */
1461  if (!frame_pointer_needed)
1462    REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1463#endif
1464
1465  VEC_free (rtx_p, heap, substitute_stack);
1466
1467  return failure;
1468}
1469
1470/* Yet another special case.  Unfortunately, reg-stack forces people to
1471   write incorrect clobbers in asm statements.  These clobbers must not
1472   cause the register to appear in bad_spill_regs, otherwise we'll call
1473   fatal_insn later.  We clear the corresponding regnos in the live
1474   register sets to avoid this.
1475   The whole thing is rather sick, I'm afraid.  */
1476
1477static void
1478maybe_fix_stack_asms (void)
1479{
1480#ifdef STACK_REGS
1481  const char *constraints[MAX_RECOG_OPERANDS];
1482  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1483  struct insn_chain *chain;
1484
1485  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1486    {
1487      int i, noperands;
1488      HARD_REG_SET clobbered, allowed;
1489      rtx pat;
1490
1491      if (! INSN_P (chain->insn)
1492	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1493	continue;
1494      pat = PATTERN (chain->insn);
1495      if (GET_CODE (pat) != PARALLEL)
1496	continue;
1497
1498      CLEAR_HARD_REG_SET (clobbered);
1499      CLEAR_HARD_REG_SET (allowed);
1500
1501      /* First, make a mask of all stack regs that are clobbered.  */
1502      for (i = 0; i < XVECLEN (pat, 0); i++)
1503	{
1504	  rtx t = XVECEXP (pat, 0, i);
1505	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1506	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1507	}
1508
1509      /* Get the operand values and constraints out of the insn.  */
1510      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1511			   constraints, operand_mode, NULL);
1512
1513      /* For every operand, see what registers are allowed.  */
1514      for (i = 0; i < noperands; i++)
1515	{
1516	  const char *p = constraints[i];
1517	  /* For every alternative, we compute the class of registers allowed
1518	     for reloading in CLS, and merge its contents into the reg set
1519	     ALLOWED.  */
1520	  int cls = (int) NO_REGS;
1521
1522	  for (;;)
1523	    {
1524	      char c = *p;
1525
1526	      if (c == '\0' || c == ',' || c == '#')
1527		{
1528		  /* End of one alternative - mark the regs in the current
1529		     class, and reset the class.  */
1530		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1531		  cls = NO_REGS;
1532		  p++;
1533		  if (c == '#')
1534		    do {
1535		      c = *p++;
1536		    } while (c != '\0' && c != ',');
1537		  if (c == '\0')
1538		    break;
1539		  continue;
1540		}
1541
1542	      switch (c)
1543		{
1544		case '=': case '+': case '*': case '%': case '?': case '!':
1545		case '0': case '1': case '2': case '3': case '4': case '<':
1546		case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1547		case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1548		case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1549		case TARGET_MEM_CONSTRAINT:
1550		  break;
1551
1552		case 'p':
1553		  cls = (int) reg_class_subunion[cls]
1554		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1555		  break;
1556
1557		case 'g':
1558		case 'r':
1559		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1560		  break;
1561
1562		default:
1563		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1564		    cls = (int) reg_class_subunion[cls]
1565		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1566		  else
1567		    cls = (int) reg_class_subunion[cls]
1568		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1569		}
1570	      p += CONSTRAINT_LEN (c, p);
1571	    }
1572	}
1573      /* Those of the registers which are clobbered, but allowed by the
1574	 constraints, must be usable as reload registers.  So clear them
1575	 out of the life information.  */
1576      AND_HARD_REG_SET (allowed, clobbered);
1577      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1578	if (TEST_HARD_REG_BIT (allowed, i))
1579	  {
1580	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1581	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1582	  }
1583    }
1584
1585#endif
1586}
1587
1588/* Copy the global variables n_reloads and rld into the corresponding elts
1589   of CHAIN.  */
1590static void
1591copy_reloads (struct insn_chain *chain)
1592{
1593  chain->n_reloads = n_reloads;
1594  chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1595  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1596  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1597}
1598
1599/* Walk the chain of insns, and determine for each whether it needs reloads
1600   and/or eliminations.  Build the corresponding insns_need_reload list, and
1601   set something_needs_elimination as appropriate.  */
1602static void
1603calculate_needs_all_insns (int global)
1604{
1605  struct insn_chain **pprev_reload = &insns_need_reload;
1606  struct insn_chain *chain, *next = 0;
1607
1608  something_needs_elimination = 0;
1609
1610  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1611  for (chain = reload_insn_chain; chain != 0; chain = next)
1612    {
1613      rtx insn = chain->insn;
1614
1615      next = chain->next;
1616
1617      /* Clear out the shortcuts.  */
1618      chain->n_reloads = 0;
1619      chain->need_elim = 0;
1620      chain->need_reload = 0;
1621      chain->need_operand_change = 0;
1622
1623      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1624	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1625	 what effects this has on the known offsets at labels.  */
1626
1627      if (LABEL_P (insn) || JUMP_P (insn)
1628	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
1629	set_label_offsets (insn, insn, 0);
1630
1631      if (INSN_P (insn))
1632	{
1633	  rtx old_body = PATTERN (insn);
1634	  int old_code = INSN_CODE (insn);
1635	  rtx old_notes = REG_NOTES (insn);
1636	  int did_elimination = 0;
1637	  int operands_changed = 0;
1638	  rtx set = single_set (insn);
1639
1640	  /* Skip insns that only set an equivalence.  */
1641	  if (set && REG_P (SET_DEST (set))
1642	      && reg_renumber[REGNO (SET_DEST (set))] < 0
1643	      && (reg_equiv_constant[REGNO (SET_DEST (set))]
1644		  || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1645		      && reg_equiv_init[REGNO (SET_DEST (set))])
1646	    continue;
1647
1648	  /* If needed, eliminate any eliminable registers.  */
1649	  if (num_eliminable || num_eliminable_invariants)
1650	    did_elimination = eliminate_regs_in_insn (insn, 0);
1651
1652	  /* Analyze the instruction.  */
1653	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1654					   global, spill_reg_order);
1655
1656	  /* If a no-op set needs more than one reload, this is likely
1657	     to be something that needs input address reloads.  We
1658	     can't get rid of this cleanly later, and it is of no use
1659	     anyway, so discard it now.
1660	     We only do this when expensive_optimizations is enabled,
1661	     since this complements reload inheritance / output
1662	     reload deletion, and it can make debugging harder.  */
1663	  if (flag_expensive_optimizations && n_reloads > 1)
1664	    {
1665	      rtx set = single_set (insn);
1666	      if (set
1667		  &&
1668		  ((SET_SRC (set) == SET_DEST (set)
1669		    && REG_P (SET_SRC (set))
1670		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1671		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1672		       && reg_renumber[REGNO (SET_SRC (set))] < 0
1673		       && reg_renumber[REGNO (SET_DEST (set))] < 0
1674		       && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1675		       && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1676		       && rtx_equal_p (reg_equiv_memory_loc
1677				       [REGNO (SET_SRC (set))],
1678				       reg_equiv_memory_loc
1679				       [REGNO (SET_DEST (set))]))))
1680		{
1681		  if (ira_conflicts_p)
1682		    /* Inform IRA about the insn deletion.  */
1683		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1684						   REGNO (SET_SRC (set)));
1685		  delete_insn (insn);
1686		  /* Delete it from the reload chain.  */
1687		  if (chain->prev)
1688		    chain->prev->next = next;
1689		  else
1690		    reload_insn_chain = next;
1691		  if (next)
1692		    next->prev = chain->prev;
1693		  chain->next = unused_insn_chains;
1694		  unused_insn_chains = chain;
1695		  continue;
1696		}
1697	    }
1698	  if (num_eliminable)
1699	    update_eliminable_offsets ();
1700
1701	  /* Remember for later shortcuts which insns had any reloads or
1702	     register eliminations.  */
1703	  chain->need_elim = did_elimination;
1704	  chain->need_reload = n_reloads > 0;
1705	  chain->need_operand_change = operands_changed;
1706
1707	  /* Discard any register replacements done.  */
1708	  if (did_elimination)
1709	    {
1710	      obstack_free (&reload_obstack, reload_insn_firstobj);
1711	      PATTERN (insn) = old_body;
1712	      INSN_CODE (insn) = old_code;
1713	      REG_NOTES (insn) = old_notes;
1714	      something_needs_elimination = 1;
1715	    }
1716
1717	  something_needs_operands_changed |= operands_changed;
1718
1719	  if (n_reloads != 0)
1720	    {
1721	      copy_reloads (chain);
1722	      *pprev_reload = chain;
1723	      pprev_reload = &chain->next_need_reload;
1724	    }
1725	}
1726    }
1727  *pprev_reload = 0;
1728}
1729
1730/* Comparison function for qsort to decide which of two reloads
1731   should be handled first.  *P1 and *P2 are the reload numbers.  */
1732
1733static int
1734reload_reg_class_lower (const void *r1p, const void *r2p)
1735{
1736  int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1737  int t;
1738
1739  /* Consider required reloads before optional ones.  */
1740  t = rld[r1].optional - rld[r2].optional;
1741  if (t != 0)
1742    return t;
1743
1744  /* Count all solitary classes before non-solitary ones.  */
1745  t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1746       - (reg_class_size[(int) rld[r1].rclass] == 1));
1747  if (t != 0)
1748    return t;
1749
1750  /* Aside from solitaires, consider all multi-reg groups first.  */
1751  t = rld[r2].nregs - rld[r1].nregs;
1752  if (t != 0)
1753    return t;
1754
1755  /* Consider reloads in order of increasing reg-class number.  */
1756  t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1757  if (t != 0)
1758    return t;
1759
1760  /* If reloads are equally urgent, sort by reload number,
1761     so that the results of qsort leave nothing to chance.  */
1762  return r1 - r2;
1763}
1764
1765/* The cost of spilling each hard reg.  */
1766static int spill_cost[FIRST_PSEUDO_REGISTER];
1767
1768/* When spilling multiple hard registers, we use SPILL_COST for the first
1769   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1770   only the first hard reg for a multi-reg pseudo.  */
1771static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1772
1773/* Map of hard regno to pseudo regno currently occupying the hard
1774   reg.  */
1775static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1776
1777/* Update the spill cost arrays, considering that pseudo REG is live.  */
1778
1779static void
1780count_pseudo (int reg)
1781{
1782  int freq = REG_FREQ (reg);
1783  int r = reg_renumber[reg];
1784  int nregs;
1785
1786  if (REGNO_REG_SET_P (&pseudos_counted, reg)
1787      || REGNO_REG_SET_P (&spilled_pseudos, reg)
1788      /* Ignore spilled pseudo-registers which can be here only if IRA
1789	 is used.  */
1790      || (ira_conflicts_p && r < 0))
1791    return;
1792
1793  SET_REGNO_REG_SET (&pseudos_counted, reg);
1794
1795  gcc_assert (r >= 0);
1796
1797  spill_add_cost[r] += freq;
1798  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1799  while (nregs-- > 0)
1800    {
1801      hard_regno_to_pseudo_regno[r + nregs] = reg;
1802      spill_cost[r + nregs] += freq;
1803    }
1804}
1805
1806/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1807   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1808
1809static void
1810order_regs_for_reload (struct insn_chain *chain)
1811{
1812  unsigned i;
1813  HARD_REG_SET used_by_pseudos;
1814  HARD_REG_SET used_by_pseudos2;
1815  reg_set_iterator rsi;
1816
1817  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1818
1819  memset (spill_cost, 0, sizeof spill_cost);
1820  memset (spill_add_cost, 0, sizeof spill_add_cost);
1821  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1822    hard_regno_to_pseudo_regno[i] = -1;
1823
1824  /* Count number of uses of each hard reg by pseudo regs allocated to it
1825     and then order them by decreasing use.  First exclude hard registers
1826     that are live in or across this insn.  */
1827
1828  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1829  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1830  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1831  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1832
1833  /* Now find out which pseudos are allocated to it, and update
1834     hard_reg_n_uses.  */
1835  CLEAR_REG_SET (&pseudos_counted);
1836
1837  EXECUTE_IF_SET_IN_REG_SET
1838    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1839    {
1840      count_pseudo (i);
1841    }
1842  EXECUTE_IF_SET_IN_REG_SET
1843    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1844    {
1845      count_pseudo (i);
1846    }
1847  CLEAR_REG_SET (&pseudos_counted);
1848}
1849
1850/* Vector of reload-numbers showing the order in which the reloads should
1851   be processed.  */
1852static short reload_order[MAX_RELOADS];
1853
1854/* This is used to keep track of the spill regs used in one insn.  */
1855static HARD_REG_SET used_spill_regs_local;
1856
1857/* We decided to spill hard register SPILLED, which has a size of
1858   SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1859   is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1860   update SPILL_COST/SPILL_ADD_COST.  */
1861
1862static void
1863count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1864{
1865  int freq = REG_FREQ (reg);
1866  int r = reg_renumber[reg];
1867  int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1868
1869  /* Ignore spilled pseudo-registers which can be here only if IRA is
1870     used.  */
1871  if ((ira_conflicts_p && r < 0)
1872      || REGNO_REG_SET_P (&spilled_pseudos, reg)
1873      || spilled + spilled_nregs <= r || r + nregs <= spilled)
1874    return;
1875
1876  SET_REGNO_REG_SET (&spilled_pseudos, reg);
1877
1878  spill_add_cost[r] -= freq;
1879  while (nregs-- > 0)
1880    {
1881      hard_regno_to_pseudo_regno[r + nregs] = -1;
1882      spill_cost[r + nregs] -= freq;
1883    }
1884}
1885
1886/* Find reload register to use for reload number ORDER.  */
1887
1888static int
1889find_reg (struct insn_chain *chain, int order)
1890{
1891  int rnum = reload_order[order];
1892  struct reload *rl = rld + rnum;
1893  int best_cost = INT_MAX;
1894  int best_reg = -1;
1895  unsigned int i, j, n;
1896  int k;
1897  HARD_REG_SET not_usable;
1898  HARD_REG_SET used_by_other_reload;
1899  reg_set_iterator rsi;
1900  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1901  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1902
1903  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1904  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1905  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1906
1907  CLEAR_HARD_REG_SET (used_by_other_reload);
1908  for (k = 0; k < order; k++)
1909    {
1910      int other = reload_order[k];
1911
1912      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1913	for (j = 0; j < rld[other].nregs; j++)
1914	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1915    }
1916
1917  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1918    {
1919#ifdef REG_ALLOC_ORDER
1920      unsigned int regno = reg_alloc_order[i];
1921#else
1922      unsigned int regno = i;
1923#endif
1924
1925      if (! TEST_HARD_REG_BIT (not_usable, regno)
1926	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1927	  && HARD_REGNO_MODE_OK (regno, rl->mode))
1928	{
1929	  int this_cost = spill_cost[regno];
1930	  int ok = 1;
1931	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1932
1933	  for (j = 1; j < this_nregs; j++)
1934	    {
1935	      this_cost += spill_add_cost[regno + j];
1936	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1937		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1938		ok = 0;
1939	    }
1940	  if (! ok)
1941	    continue;
1942
1943	  if (ira_conflicts_p)
1944	    {
1945	      /* Ask IRA to find a better pseudo-register for
1946		 spilling.  */
1947	      for (n = j = 0; j < this_nregs; j++)
1948		{
1949		  int r = hard_regno_to_pseudo_regno[regno + j];
1950
1951		  if (r < 0)
1952		    continue;
1953		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
1954		    regno_pseudo_regs[n++] = r;
1955		}
1956	      regno_pseudo_regs[n++] = -1;
1957	      if (best_reg < 0
1958		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1959						      best_regno_pseudo_regs,
1960						      rl->in, rl->out,
1961						      chain->insn))
1962		{
1963		  best_reg = regno;
1964		  for (j = 0;; j++)
1965		    {
1966		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1967		      if (regno_pseudo_regs[j] < 0)
1968			break;
1969		    }
1970		}
1971	      continue;
1972	    }
1973
1974	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1975	    this_cost--;
1976	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1977	    this_cost--;
1978	  if (this_cost < best_cost
1979	      /* Among registers with equal cost, prefer caller-saved ones, or
1980		 use REG_ALLOC_ORDER if it is defined.  */
1981	      || (this_cost == best_cost
1982#ifdef REG_ALLOC_ORDER
1983		  && (inv_reg_alloc_order[regno]
1984		      < inv_reg_alloc_order[best_reg])
1985#else
1986		  && call_used_regs[regno]
1987		  && ! call_used_regs[best_reg]
1988#endif
1989		  ))
1990	    {
1991	      best_reg = regno;
1992	      best_cost = this_cost;
1993	    }
1994	}
1995    }
1996  if (best_reg == -1)
1997    return 0;
1998
1999  if (dump_file)
2000    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
2001
2002  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
2003  rl->regno = best_reg;
2004
2005  EXECUTE_IF_SET_IN_REG_SET
2006    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
2007    {
2008      count_spilled_pseudo (best_reg, rl->nregs, j);
2009    }
2010
2011  EXECUTE_IF_SET_IN_REG_SET
2012    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
2013    {
2014      count_spilled_pseudo (best_reg, rl->nregs, j);
2015    }
2016
2017  for (i = 0; i < rl->nregs; i++)
2018    {
2019      gcc_assert (spill_cost[best_reg + i] == 0);
2020      gcc_assert (spill_add_cost[best_reg + i] == 0);
2021      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
2022      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
2023    }
2024  return 1;
2025}
2026
2027/* Find more reload regs to satisfy the remaining need of an insn, which
2028   is given by CHAIN.
2029   Do it by ascending class number, since otherwise a reg
2030   might be spilled for a big class and might fail to count
2031   for a smaller class even though it belongs to that class.  */
2032
2033static void
2034find_reload_regs (struct insn_chain *chain)
2035{
2036  int i;
2037
2038  /* In order to be certain of getting the registers we need,
2039     we must sort the reloads into order of increasing register class.
2040     Then our grabbing of reload registers will parallel the process
2041     that provided the reload registers.  */
2042  for (i = 0; i < chain->n_reloads; i++)
2043    {
2044      /* Show whether this reload already has a hard reg.  */
2045      if (chain->rld[i].reg_rtx)
2046	{
2047	  int regno = REGNO (chain->rld[i].reg_rtx);
2048	  chain->rld[i].regno = regno;
2049	  chain->rld[i].nregs
2050	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2051	}
2052      else
2053	chain->rld[i].regno = -1;
2054      reload_order[i] = i;
2055    }
2056
2057  n_reloads = chain->n_reloads;
2058  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2059
2060  CLEAR_HARD_REG_SET (used_spill_regs_local);
2061
2062  if (dump_file)
2063    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2064
2065  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2066
2067  /* Compute the order of preference for hard registers to spill.  */
2068
2069  order_regs_for_reload (chain);
2070
2071  for (i = 0; i < n_reloads; i++)
2072    {
2073      int r = reload_order[i];
2074
2075      /* Ignore reloads that got marked inoperative.  */
2076      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2077	  && ! rld[r].optional
2078	  && rld[r].regno == -1)
2079	if (! find_reg (chain, i))
2080	  {
2081	    if (dump_file)
2082	      fprintf (dump_file, "reload failure for reload %d\n", r);
2083	    spill_failure (chain->insn, rld[r].rclass);
2084	    failure = 1;
2085	    return;
2086	  }
2087    }
2088
2089  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2090  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2091
2092  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2093}
2094
2095static void
2096select_reload_regs (void)
2097{
2098  struct insn_chain *chain;
2099
2100  /* Try to satisfy the needs for each insn.  */
2101  for (chain = insns_need_reload; chain != 0;
2102       chain = chain->next_need_reload)
2103    find_reload_regs (chain);
2104}
2105
2106/* Delete all insns that were inserted by emit_caller_save_insns during
2107   this iteration.  */
2108static void
2109delete_caller_save_insns (void)
2110{
2111  struct insn_chain *c = reload_insn_chain;
2112
2113  while (c != 0)
2114    {
2115      while (c != 0 && c->is_caller_save_insn)
2116	{
2117	  struct insn_chain *next = c->next;
2118	  rtx insn = c->insn;
2119
2120	  if (c == reload_insn_chain)
2121	    reload_insn_chain = next;
2122	  delete_insn (insn);
2123
2124	  if (next)
2125	    next->prev = c->prev;
2126	  if (c->prev)
2127	    c->prev->next = next;
2128	  c->next = unused_insn_chains;
2129	  unused_insn_chains = c;
2130	  c = next;
2131	}
2132      if (c != 0)
2133	c = c->next;
2134    }
2135}
2136
2137/* Handle the failure to find a register to spill.
2138   INSN should be one of the insns which needed this particular spill reg.  */
2139
2140static void
2141spill_failure (rtx insn, enum reg_class rclass)
2142{
2143  if (asm_noperands (PATTERN (insn)) >= 0)
2144    error_for_asm (insn, "can't find a register in class %qs while "
2145		   "reloading %<asm%>",
2146		   reg_class_names[rclass]);
2147  else
2148    {
2149      error ("unable to find a register to spill in class %qs",
2150	     reg_class_names[rclass]);
2151
2152      if (dump_file)
2153	{
2154	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2155	  debug_reload_to_stream (dump_file);
2156	}
2157      fatal_insn ("this is the insn:", insn);
2158    }
2159}
2160
2161/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2162   data that is dead in INSN.  */
2163
2164static void
2165delete_dead_insn (rtx insn)
2166{
2167  rtx prev = prev_real_insn (insn);
2168  rtx prev_dest;
2169
2170  /* If the previous insn sets a register that dies in our insn, delete it
2171     too.  */
2172  if (prev && GET_CODE (PATTERN (prev)) == SET
2173      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2174      && reg_mentioned_p (prev_dest, PATTERN (insn))
2175      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2176      && ! side_effects_p (SET_SRC (PATTERN (prev))))
2177    delete_dead_insn (prev);
2178
2179  SET_INSN_DELETED (insn);
2180}
2181
2182/* Modify the home of pseudo-reg I.
2183   The new home is present in reg_renumber[I].
2184
2185   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2186   or it may be -1, meaning there is none or it is not relevant.
2187   This is used so that all pseudos spilled from a given hard reg
2188   can share one stack slot.  */
2189
2190static void
2191alter_reg (int i, int from_reg, bool dont_share_p)
2192{
2193  /* When outputting an inline function, this can happen
2194     for a reg that isn't actually used.  */
2195  if (regno_reg_rtx[i] == 0)
2196    return;
2197
2198  /* If the reg got changed to a MEM at rtl-generation time,
2199     ignore it.  */
2200  if (!REG_P (regno_reg_rtx[i]))
2201    return;
2202
2203  /* Modify the reg-rtx to contain the new hard reg
2204     number or else to contain its pseudo reg number.  */
2205  SET_REGNO (regno_reg_rtx[i],
2206	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2207
2208  /* If we have a pseudo that is needed but has no hard reg or equivalent,
2209     allocate a stack slot for it.  */
2210
2211  if (reg_renumber[i] < 0
2212      && REG_N_REFS (i) > 0
2213      && reg_equiv_constant[i] == 0
2214      && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2215      && reg_equiv_memory_loc[i] == 0)
2216    {
2217      rtx x = NULL_RTX;
2218      enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2219      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2220      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2221      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2222      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2223      int adjust = 0;
2224
2225      if (ira_conflicts_p)
2226	{
2227	  /* Mark the spill for IRA.  */
2228	  SET_REGNO_REG_SET (&spilled_pseudos, i);
2229	  if (!dont_share_p)
2230	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
2231	}
2232
2233      if (x)
2234	;
2235
2236      /* Each pseudo reg has an inherent size which comes from its own mode,
2237	 and a total size which provides room for paradoxical subregs
2238	 which refer to the pseudo reg in wider modes.
2239
2240	 We can use a slot already allocated if it provides both
2241	 enough inherent space and enough total space.
2242	 Otherwise, we allocate a new slot, making sure that it has no less
2243	 inherent space, and no less total space, then the previous slot.  */
2244      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2245	{
2246	  rtx stack_slot;
2247
2248	  /* No known place to spill from => no slot to reuse.  */
2249	  x = assign_stack_local (mode, total_size,
2250				  min_align > inherent_align
2251				  || total_size > inherent_size ? -1 : 0);
2252
2253	  stack_slot = x;
2254
2255	  /* Cancel the big-endian correction done in assign_stack_local.
2256	     Get the address of the beginning of the slot.  This is so we
2257	     can do a big-endian correction unconditionally below.  */
2258	  if (BYTES_BIG_ENDIAN)
2259	    {
2260	      adjust = inherent_size - total_size;
2261	      if (adjust)
2262		stack_slot
2263		  = adjust_address_nv (x, mode_for_size (total_size
2264						         * BITS_PER_UNIT,
2265						         MODE_INT, 1),
2266				       adjust);
2267	    }
2268
2269	  if (! dont_share_p && ira_conflicts_p)
2270	    /* Inform IRA about allocation a new stack slot.  */
2271	    ira_mark_new_stack_slot (stack_slot, i, total_size);
2272	}
2273
2274      /* Reuse a stack slot if possible.  */
2275      else if (spill_stack_slot[from_reg] != 0
2276	       && spill_stack_slot_width[from_reg] >= total_size
2277	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2278		   >= inherent_size)
2279	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2280	x = spill_stack_slot[from_reg];
2281
2282      /* Allocate a bigger slot.  */
2283      else
2284	{
2285	  /* Compute maximum size needed, both for inherent size
2286	     and for total size.  */
2287	  rtx stack_slot;
2288
2289	  if (spill_stack_slot[from_reg])
2290	    {
2291	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2292		  > inherent_size)
2293		mode = GET_MODE (spill_stack_slot[from_reg]);
2294	      if (spill_stack_slot_width[from_reg] > total_size)
2295		total_size = spill_stack_slot_width[from_reg];
2296	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2297		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2298	    }
2299
2300	  /* Make a slot with that size.  */
2301	  x = assign_stack_local (mode, total_size,
2302				  min_align > inherent_align
2303				  || total_size > inherent_size ? -1 : 0);
2304	  stack_slot = x;
2305
2306	  /* Cancel the  big-endian correction done in assign_stack_local.
2307	     Get the address of the beginning of the slot.  This is so we
2308	     can do a big-endian correction unconditionally below.  */
2309	  if (BYTES_BIG_ENDIAN)
2310	    {
2311	      adjust = GET_MODE_SIZE (mode) - total_size;
2312	      if (adjust)
2313		stack_slot
2314		  = adjust_address_nv (x, mode_for_size (total_size
2315							 * BITS_PER_UNIT,
2316							 MODE_INT, 1),
2317				       adjust);
2318	    }
2319
2320	  spill_stack_slot[from_reg] = stack_slot;
2321	  spill_stack_slot_width[from_reg] = total_size;
2322	}
2323
2324      /* On a big endian machine, the "address" of the slot
2325	 is the address of the low part that fits its inherent mode.  */
2326      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2327	adjust += (total_size - inherent_size);
2328
2329      /* If we have any adjustment to make, or if the stack slot is the
2330	 wrong mode, make a new stack slot.  */
2331      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2332
2333      /* Set all of the memory attributes as appropriate for a spill.  */
2334      set_mem_attrs_for_spill (x);
2335
2336      /* Save the stack slot for later.  */
2337      reg_equiv_memory_loc[i] = x;
2338    }
2339}
2340
2341/* Mark the slots in regs_ever_live for the hard regs used by
2342   pseudo-reg number REGNO, accessed in MODE.  */
2343
2344static void
2345mark_home_live_1 (int regno, enum machine_mode mode)
2346{
2347  int i, lim;
2348
2349  i = reg_renumber[regno];
2350  if (i < 0)
2351    return;
2352  lim = end_hard_regno (mode, i);
2353  while (i < lim)
2354    df_set_regs_ever_live(i++, true);
2355}
2356
2357/* Mark the slots in regs_ever_live for the hard regs
2358   used by pseudo-reg number REGNO.  */
2359
2360void
2361mark_home_live (int regno)
2362{
2363  if (reg_renumber[regno] >= 0)
2364    mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2365}
2366
2367/* This function handles the tracking of elimination offsets around branches.
2368
2369   X is a piece of RTL being scanned.
2370
2371   INSN is the insn that it came from, if any.
2372
2373   INITIAL_P is nonzero if we are to set the offset to be the initial
2374   offset and zero if we are setting the offset of the label to be the
2375   current offset.  */
2376
2377static void
2378set_label_offsets (rtx x, rtx insn, int initial_p)
2379{
2380  enum rtx_code code = GET_CODE (x);
2381  rtx tem;
2382  unsigned int i;
2383  struct elim_table *p;
2384
2385  switch (code)
2386    {
2387    case LABEL_REF:
2388      if (LABEL_REF_NONLOCAL_P (x))
2389	return;
2390
2391      x = XEXP (x, 0);
2392
2393      /* ... fall through ...  */
2394
2395    case CODE_LABEL:
2396      /* If we know nothing about this label, set the desired offsets.  Note
2397	 that this sets the offset at a label to be the offset before a label
2398	 if we don't know anything about the label.  This is not correct for
2399	 the label after a BARRIER, but is the best guess we can make.  If
2400	 we guessed wrong, we will suppress an elimination that might have
2401	 been possible had we been able to guess correctly.  */
2402
2403      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2404	{
2405	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2406	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2407	      = (initial_p ? reg_eliminate[i].initial_offset
2408		 : reg_eliminate[i].offset);
2409	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2410	}
2411
2412      /* Otherwise, if this is the definition of a label and it is
2413	 preceded by a BARRIER, set our offsets to the known offset of
2414	 that label.  */
2415
2416      else if (x == insn
2417	       && (tem = prev_nonnote_insn (insn)) != 0
2418	       && BARRIER_P (tem))
2419	set_offsets_for_label (insn);
2420      else
2421	/* If neither of the above cases is true, compare each offset
2422	   with those previously recorded and suppress any eliminations
2423	   where the offsets disagree.  */
2424
2425	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2426	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2427	      != (initial_p ? reg_eliminate[i].initial_offset
2428		  : reg_eliminate[i].offset))
2429	    reg_eliminate[i].can_eliminate = 0;
2430
2431      return;
2432
2433    case JUMP_INSN:
2434      set_label_offsets (PATTERN (insn), insn, initial_p);
2435
2436      /* ... fall through ...  */
2437
2438    case INSN:
2439    case CALL_INSN:
2440      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2441	 to indirectly and hence must have all eliminations at their
2442	 initial offsets.  */
2443      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2444	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2445	  set_label_offsets (XEXP (tem, 0), insn, 1);
2446      return;
2447
2448    case PARALLEL:
2449    case ADDR_VEC:
2450    case ADDR_DIFF_VEC:
2451      /* Each of the labels in the parallel or address vector must be
2452	 at their initial offsets.  We want the first field for PARALLEL
2453	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2454
2455      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2456	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2457			   insn, initial_p);
2458      return;
2459
2460    case SET:
2461      /* We only care about setting PC.  If the source is not RETURN,
2462	 IF_THEN_ELSE, or a label, disable any eliminations not at
2463	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2464	 isn't one of those possibilities.  For branches to a label,
2465	 call ourselves recursively.
2466
2467	 Note that this can disable elimination unnecessarily when we have
2468	 a non-local goto since it will look like a non-constant jump to
2469	 someplace in the current function.  This isn't a significant
2470	 problem since such jumps will normally be when all elimination
2471	 pairs are back to their initial offsets.  */
2472
2473      if (SET_DEST (x) != pc_rtx)
2474	return;
2475
2476      switch (GET_CODE (SET_SRC (x)))
2477	{
2478	case PC:
2479	case RETURN:
2480	  return;
2481
2482	case LABEL_REF:
2483	  set_label_offsets (SET_SRC (x), insn, initial_p);
2484	  return;
2485
2486	case IF_THEN_ELSE:
2487	  tem = XEXP (SET_SRC (x), 1);
2488	  if (GET_CODE (tem) == LABEL_REF)
2489	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2490	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2491	    break;
2492
2493	  tem = XEXP (SET_SRC (x), 2);
2494	  if (GET_CODE (tem) == LABEL_REF)
2495	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2496	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2497	    break;
2498	  return;
2499
2500	default:
2501	  break;
2502	}
2503
2504      /* If we reach here, all eliminations must be at their initial
2505	 offset because we are doing a jump to a variable address.  */
2506      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2507	if (p->offset != p->initial_offset)
2508	  p->can_eliminate = 0;
2509      break;
2510
2511    default:
2512      break;
2513    }
2514}
2515
2516/* Scan X and replace any eliminable registers (such as fp) with a
2517   replacement (such as sp), plus an offset.
2518
2519   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2520   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2521   MEM, we are allowed to replace a sum of a register and the constant zero
2522   with the register, which we cannot do outside a MEM.  In addition, we need
2523   to record the fact that a register is referenced outside a MEM.
2524
2525   If INSN is an insn, it is the insn containing X.  If we replace a REG
2526   in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2527   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2528   the REG is being modified.
2529
2530   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2531   That's used when we eliminate in expressions stored in notes.
2532   This means, do not set ref_outside_mem even if the reference
2533   is outside of MEMs.
2534
2535   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2536   replacements done assuming all offsets are at their initial values.  If
2537   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2538   encounter, return the actual location so that find_reloads will do
2539   the proper thing.  */
2540
2541static rtx
2542eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2543		  bool may_use_invariant)
2544{
2545  enum rtx_code code = GET_CODE (x);
2546  struct elim_table *ep;
2547  int regno;
2548  rtx new_rtx;
2549  int i, j;
2550  const char *fmt;
2551  int copied = 0;
2552
2553  if (! current_function_decl)
2554    return x;
2555
2556  switch (code)
2557    {
2558    case CONST_INT:
2559    case CONST_DOUBLE:
2560    case CONST_FIXED:
2561    case CONST_VECTOR:
2562    case CONST:
2563    case SYMBOL_REF:
2564    case CODE_LABEL:
2565    case PC:
2566    case CC0:
2567    case ASM_INPUT:
2568    case ADDR_VEC:
2569    case ADDR_DIFF_VEC:
2570    case RETURN:
2571      return x;
2572
2573    case REG:
2574      regno = REGNO (x);
2575
2576      /* First handle the case where we encounter a bare register that
2577	 is eliminable.  Replace it with a PLUS.  */
2578      if (regno < FIRST_PSEUDO_REGISTER)
2579	{
2580	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2581	       ep++)
2582	    if (ep->from_rtx == x && ep->can_eliminate)
2583	      return plus_constant (ep->to_rtx, ep->previous_offset);
2584
2585	}
2586      else if (reg_renumber && reg_renumber[regno] < 0
2587	       && reg_equiv_invariant && reg_equiv_invariant[regno])
2588	{
2589	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2590	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2591			             mem_mode, insn, true);
2592	  /* There exists at least one use of REGNO that cannot be
2593	     eliminated.  Prevent the defining insn from being deleted.  */
2594	  reg_equiv_init[regno] = NULL_RTX;
2595	  alter_reg (regno, -1, true);
2596	}
2597      return x;
2598
2599    /* You might think handling MINUS in a manner similar to PLUS is a
2600       good idea.  It is not.  It has been tried multiple times and every
2601       time the change has had to have been reverted.
2602
2603       Other parts of reload know a PLUS is special (gen_reload for example)
2604       and require special code to handle code a reloaded PLUS operand.
2605
2606       Also consider backends where the flags register is clobbered by a
2607       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2608       lea instruction comes to mind).  If we try to reload a MINUS, we
2609       may kill the flags register that was holding a useful value.
2610
2611       So, please before trying to handle MINUS, consider reload as a
2612       whole instead of this little section as well as the backend issues.  */
2613    case PLUS:
2614      /* If this is the sum of an eliminable register and a constant, rework
2615	 the sum.  */
2616      if (REG_P (XEXP (x, 0))
2617	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2618	  && CONSTANT_P (XEXP (x, 1)))
2619	{
2620	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2621	       ep++)
2622	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2623	      {
2624		/* The only time we want to replace a PLUS with a REG (this
2625		   occurs when the constant operand of the PLUS is the negative
2626		   of the offset) is when we are inside a MEM.  We won't want
2627		   to do so at other times because that would change the
2628		   structure of the insn in a way that reload can't handle.
2629		   We special-case the commonest situation in
2630		   eliminate_regs_in_insn, so just replace a PLUS with a
2631		   PLUS here, unless inside a MEM.  */
2632		if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2633		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2634		  return ep->to_rtx;
2635		else
2636		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2637				       plus_constant (XEXP (x, 1),
2638						      ep->previous_offset));
2639	      }
2640
2641	  /* If the register is not eliminable, we are done since the other
2642	     operand is a constant.  */
2643	  return x;
2644	}
2645
2646      /* If this is part of an address, we want to bring any constant to the
2647	 outermost PLUS.  We will do this by doing register replacement in
2648	 our operands and seeing if a constant shows up in one of them.
2649
2650	 Note that there is no risk of modifying the structure of the insn,
2651	 since we only get called for its operands, thus we are either
2652	 modifying the address inside a MEM, or something like an address
2653	 operand of a load-address insn.  */
2654
2655      {
2656	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2657	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2658
2659	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2660	  {
2661	    /* If one side is a PLUS and the other side is a pseudo that
2662	       didn't get a hard register but has a reg_equiv_constant,
2663	       we must replace the constant here since it may no longer
2664	       be in the position of any operand.  */
2665	    if (GET_CODE (new0) == PLUS && REG_P (new1)
2666		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2667		&& reg_renumber[REGNO (new1)] < 0
2668		&& reg_equiv_constant != 0
2669		&& reg_equiv_constant[REGNO (new1)] != 0)
2670	      new1 = reg_equiv_constant[REGNO (new1)];
2671	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
2672		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2673		     && reg_renumber[REGNO (new0)] < 0
2674		     && reg_equiv_constant[REGNO (new0)] != 0)
2675	      new0 = reg_equiv_constant[REGNO (new0)];
2676
2677	    new_rtx = form_sum (GET_MODE (x), new0, new1);
2678
2679	    /* As above, if we are not inside a MEM we do not want to
2680	       turn a PLUS into something else.  We might try to do so here
2681	       for an addition of 0 if we aren't optimizing.  */
2682	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2683	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2684	    else
2685	      return new_rtx;
2686	  }
2687      }
2688      return x;
2689
2690    case MULT:
2691      /* If this is the product of an eliminable register and a
2692	 constant, apply the distribute law and move the constant out
2693	 so that we have (plus (mult ..) ..).  This is needed in order
2694	 to keep load-address insns valid.   This case is pathological.
2695	 We ignore the possibility of overflow here.  */
2696      if (REG_P (XEXP (x, 0))
2697	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2698	  && CONST_INT_P (XEXP (x, 1)))
2699	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2700	     ep++)
2701	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2702	    {
2703	      if (! mem_mode
2704		  /* Refs inside notes or in DEBUG_INSNs don't count for
2705		     this purpose.  */
2706		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2707				      || GET_CODE (insn) == INSN_LIST
2708				      || DEBUG_INSN_P (insn))))
2709		ep->ref_outside_mem = 1;
2710
2711	      return
2712		plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2713			       ep->previous_offset * INTVAL (XEXP (x, 1)));
2714	    }
2715
2716      /* ... fall through ...  */
2717
2718    case CALL:
2719    case COMPARE:
2720    /* See comments before PLUS about handling MINUS.  */
2721    case MINUS:
2722    case DIV:      case UDIV:
2723    case MOD:      case UMOD:
2724    case AND:      case IOR:      case XOR:
2725    case ROTATERT: case ROTATE:
2726    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2727    case NE:       case EQ:
2728    case GE:       case GT:       case GEU:    case GTU:
2729    case LE:       case LT:       case LEU:    case LTU:
2730      {
2731	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2732	rtx new1 = XEXP (x, 1)
2733		   ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false) : 0;
2734
2735	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2736	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2737      }
2738      return x;
2739
2740    case EXPR_LIST:
2741      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2742      if (XEXP (x, 0))
2743	{
2744	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true);
2745	  if (new_rtx != XEXP (x, 0))
2746	    {
2747	      /* If this is a REG_DEAD note, it is not valid anymore.
2748		 Using the eliminated version could result in creating a
2749		 REG_DEAD note for the stack or frame pointer.  */
2750	      if (REG_NOTE_KIND (x) == REG_DEAD)
2751		return (XEXP (x, 1)
2752			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true)
2753			: NULL_RTX);
2754
2755	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2756	    }
2757	}
2758
2759      /* ... fall through ...  */
2760
2761    case INSN_LIST:
2762      /* Now do eliminations in the rest of the chain.  If this was
2763	 an EXPR_LIST, this might result in allocating more memory than is
2764	 strictly needed, but it simplifies the code.  */
2765      if (XEXP (x, 1))
2766	{
2767	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true);
2768	  if (new_rtx != XEXP (x, 1))
2769	    return
2770	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2771	}
2772      return x;
2773
2774    case PRE_INC:
2775    case POST_INC:
2776    case PRE_DEC:
2777    case POST_DEC:
2778      /* We do not support elimination of a register that is modified.
2779	 elimination_effects has already make sure that this does not
2780	 happen.  */
2781      return x;
2782
2783    case PRE_MODIFY:
2784    case POST_MODIFY:
2785      /* We do not support elimination of a register that is modified.
2786	 elimination_effects has already make sure that this does not
2787	 happen.  The only remaining case we need to consider here is
2788	 that the increment value may be an eliminable register.  */
2789      if (GET_CODE (XEXP (x, 1)) == PLUS
2790	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2791	{
2792	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2793				      insn, true);
2794
2795	  if (new_rtx != XEXP (XEXP (x, 1), 1))
2796	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2797				   gen_rtx_PLUS (GET_MODE (x),
2798						 XEXP (x, 0), new_rtx));
2799	}
2800      return x;
2801
2802    case STRICT_LOW_PART:
2803    case NEG:          case NOT:
2804    case SIGN_EXTEND:  case ZERO_EXTEND:
2805    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2806    case FLOAT:        case FIX:
2807    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2808    case ABS:
2809    case SQRT:
2810    case FFS:
2811    case CLZ:
2812    case CTZ:
2813    case POPCOUNT:
2814    case PARITY:
2815    case BSWAP:
2816      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
2817      if (new_rtx != XEXP (x, 0))
2818	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2819      return x;
2820
2821    case SUBREG:
2822      /* Similar to above processing, but preserve SUBREG_BYTE.
2823	 Convert (subreg (mem)) to (mem) if not paradoxical.
2824	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2825	 pseudo didn't get a hard reg, we must replace this with the
2826	 eliminated version of the memory location because push_reload
2827	 may do the replacement in certain circumstances.  */
2828      if (REG_P (SUBREG_REG (x))
2829	  && (GET_MODE_SIZE (GET_MODE (x))
2830	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2831	  && reg_equiv_memory_loc != 0
2832	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2833	{
2834	  new_rtx = SUBREG_REG (x);
2835	}
2836      else
2837	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false);
2838
2839      if (new_rtx != SUBREG_REG (x))
2840	{
2841	  int x_size = GET_MODE_SIZE (GET_MODE (x));
2842	  int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2843
2844	  if (MEM_P (new_rtx)
2845	      && ((x_size < new_size
2846#ifdef WORD_REGISTER_OPERATIONS
2847		   /* On these machines, combine can create rtl of the form
2848		      (set (subreg:m1 (reg:m2 R) 0) ...)
2849		      where m1 < m2, and expects something interesting to
2850		      happen to the entire word.  Moreover, it will use the
2851		      (reg:m2 R) later, expecting all bits to be preserved.
2852		      So if the number of words is the same, preserve the
2853		      subreg so that push_reload can see it.  */
2854		   && ! ((x_size - 1) / UNITS_PER_WORD
2855			 == (new_size -1 ) / UNITS_PER_WORD)
2856#endif
2857		   )
2858		  || x_size == new_size)
2859	      )
2860	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2861	  else
2862	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2863	}
2864
2865      return x;
2866
2867    case MEM:
2868      /* Our only special processing is to pass the mode of the MEM to our
2869	 recursive call and copy the flags.  While we are here, handle this
2870	 case more efficiently.  */
2871      return
2872	replace_equiv_address_nv (x,
2873				  eliminate_regs_1 (XEXP (x, 0), GET_MODE (x),
2874						    insn, true));
2875
2876    case USE:
2877      /* Handle insn_list USE that a call to a pure function may generate.  */
2878      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false);
2879      if (new_rtx != XEXP (x, 0))
2880	return gen_rtx_USE (GET_MODE (x), new_rtx);
2881      return x;
2882
2883    case CLOBBER:
2884      gcc_assert (insn && DEBUG_INSN_P (insn));
2885      break;
2886
2887    case ASM_OPERANDS:
2888    case SET:
2889      gcc_unreachable ();
2890
2891    default:
2892      break;
2893    }
2894
2895  /* Process each of our operands recursively.  If any have changed, make a
2896     copy of the rtx.  */
2897  fmt = GET_RTX_FORMAT (code);
2898  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2899    {
2900      if (*fmt == 'e')
2901	{
2902	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
2903	  if (new_rtx != XEXP (x, i) && ! copied)
2904	    {
2905	      x = shallow_copy_rtx (x);
2906	      copied = 1;
2907	    }
2908	  XEXP (x, i) = new_rtx;
2909	}
2910      else if (*fmt == 'E')
2911	{
2912	  int copied_vec = 0;
2913	  for (j = 0; j < XVECLEN (x, i); j++)
2914	    {
2915	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false);
2916	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2917		{
2918		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2919					     XVEC (x, i)->elem);
2920		  if (! copied)
2921		    {
2922		      x = shallow_copy_rtx (x);
2923		      copied = 1;
2924		    }
2925		  XVEC (x, i) = new_v;
2926		  copied_vec = 1;
2927		}
2928	      XVECEXP (x, i, j) = new_rtx;
2929	    }
2930	}
2931    }
2932
2933  return x;
2934}
2935
2936rtx
2937eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2938{
2939  return eliminate_regs_1 (x, mem_mode, insn, false);
2940}
2941
2942/* Scan rtx X for modifications of elimination target registers.  Update
2943   the table of eliminables to reflect the changed state.  MEM_MODE is
2944   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2945
2946static void
2947elimination_effects (rtx x, enum machine_mode mem_mode)
2948{
2949  enum rtx_code code = GET_CODE (x);
2950  struct elim_table *ep;
2951  int regno;
2952  int i, j;
2953  const char *fmt;
2954
2955  switch (code)
2956    {
2957    case CONST_INT:
2958    case CONST_DOUBLE:
2959    case CONST_FIXED:
2960    case CONST_VECTOR:
2961    case CONST:
2962    case SYMBOL_REF:
2963    case CODE_LABEL:
2964    case PC:
2965    case CC0:
2966    case ASM_INPUT:
2967    case ADDR_VEC:
2968    case ADDR_DIFF_VEC:
2969    case RETURN:
2970      return;
2971
2972    case REG:
2973      regno = REGNO (x);
2974
2975      /* First handle the case where we encounter a bare register that
2976	 is eliminable.  Replace it with a PLUS.  */
2977      if (regno < FIRST_PSEUDO_REGISTER)
2978	{
2979	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2980	       ep++)
2981	    if (ep->from_rtx == x && ep->can_eliminate)
2982	      {
2983		if (! mem_mode)
2984		  ep->ref_outside_mem = 1;
2985		return;
2986	      }
2987
2988	}
2989      else if (reg_renumber[regno] < 0 && reg_equiv_constant
2990	       && reg_equiv_constant[regno]
2991	       && ! function_invariant_p (reg_equiv_constant[regno]))
2992	elimination_effects (reg_equiv_constant[regno], mem_mode);
2993      return;
2994
2995    case PRE_INC:
2996    case POST_INC:
2997    case PRE_DEC:
2998    case POST_DEC:
2999    case POST_MODIFY:
3000    case PRE_MODIFY:
3001      /* If we modify the source of an elimination rule, disable it.  */
3002      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3003	if (ep->from_rtx == XEXP (x, 0))
3004	  ep->can_eliminate = 0;
3005
3006      /* If we modify the target of an elimination rule by adding a constant,
3007	 update its offset.  If we modify the target in any other way, we'll
3008	 have to disable the rule as well.  */
3009      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3010	if (ep->to_rtx == XEXP (x, 0))
3011	  {
3012	    int size = GET_MODE_SIZE (mem_mode);
3013
3014	    /* If more bytes than MEM_MODE are pushed, account for them.  */
3015#ifdef PUSH_ROUNDING
3016	    if (ep->to_rtx == stack_pointer_rtx)
3017	      size = PUSH_ROUNDING (size);
3018#endif
3019	    if (code == PRE_DEC || code == POST_DEC)
3020	      ep->offset += size;
3021	    else if (code == PRE_INC || code == POST_INC)
3022	      ep->offset -= size;
3023	    else if (code == PRE_MODIFY || code == POST_MODIFY)
3024	      {
3025		if (GET_CODE (XEXP (x, 1)) == PLUS
3026		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3027		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3028		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3029		else
3030		  ep->can_eliminate = 0;
3031	      }
3032	  }
3033
3034      /* These two aren't unary operators.  */
3035      if (code == POST_MODIFY || code == PRE_MODIFY)
3036	break;
3037
3038      /* Fall through to generic unary operation case.  */
3039    case STRICT_LOW_PART:
3040    case NEG:          case NOT:
3041    case SIGN_EXTEND:  case ZERO_EXTEND:
3042    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3043    case FLOAT:        case FIX:
3044    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3045    case ABS:
3046    case SQRT:
3047    case FFS:
3048    case CLZ:
3049    case CTZ:
3050    case POPCOUNT:
3051    case PARITY:
3052    case BSWAP:
3053      elimination_effects (XEXP (x, 0), mem_mode);
3054      return;
3055
3056    case SUBREG:
3057      if (REG_P (SUBREG_REG (x))
3058	  && (GET_MODE_SIZE (GET_MODE (x))
3059	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3060	  && reg_equiv_memory_loc != 0
3061	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3062	return;
3063
3064      elimination_effects (SUBREG_REG (x), mem_mode);
3065      return;
3066
3067    case USE:
3068      /* If using a register that is the source of an eliminate we still
3069	 think can be performed, note it cannot be performed since we don't
3070	 know how this register is used.  */
3071      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3072	if (ep->from_rtx == XEXP (x, 0))
3073	  ep->can_eliminate = 0;
3074
3075      elimination_effects (XEXP (x, 0), mem_mode);
3076      return;
3077
3078    case CLOBBER:
3079      /* If clobbering a register that is the replacement register for an
3080	 elimination we still think can be performed, note that it cannot
3081	 be performed.  Otherwise, we need not be concerned about it.  */
3082      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3083	if (ep->to_rtx == XEXP (x, 0))
3084	  ep->can_eliminate = 0;
3085
3086      elimination_effects (XEXP (x, 0), mem_mode);
3087      return;
3088
3089    case SET:
3090      /* Check for setting a register that we know about.  */
3091      if (REG_P (SET_DEST (x)))
3092	{
3093	  /* See if this is setting the replacement register for an
3094	     elimination.
3095
3096	     If DEST is the hard frame pointer, we do nothing because we
3097	     assume that all assignments to the frame pointer are for
3098	     non-local gotos and are being done at a time when they are valid
3099	     and do not disturb anything else.  Some machines want to
3100	     eliminate a fake argument pointer (or even a fake frame pointer)
3101	     with either the real frame or the stack pointer.  Assignments to
3102	     the hard frame pointer must not prevent this elimination.  */
3103
3104	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3105	       ep++)
3106	    if (ep->to_rtx == SET_DEST (x)
3107		&& SET_DEST (x) != hard_frame_pointer_rtx)
3108	      {
3109		/* If it is being incremented, adjust the offset.  Otherwise,
3110		   this elimination can't be done.  */
3111		rtx src = SET_SRC (x);
3112
3113		if (GET_CODE (src) == PLUS
3114		    && XEXP (src, 0) == SET_DEST (x)
3115		    && CONST_INT_P (XEXP (src, 1)))
3116		  ep->offset -= INTVAL (XEXP (src, 1));
3117		else
3118		  ep->can_eliminate = 0;
3119	      }
3120	}
3121
3122      elimination_effects (SET_DEST (x), VOIDmode);
3123      elimination_effects (SET_SRC (x), VOIDmode);
3124      return;
3125
3126    case MEM:
3127      /* Our only special processing is to pass the mode of the MEM to our
3128	 recursive call.  */
3129      elimination_effects (XEXP (x, 0), GET_MODE (x));
3130      return;
3131
3132    default:
3133      break;
3134    }
3135
3136  fmt = GET_RTX_FORMAT (code);
3137  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3138    {
3139      if (*fmt == 'e')
3140	elimination_effects (XEXP (x, i), mem_mode);
3141      else if (*fmt == 'E')
3142	for (j = 0; j < XVECLEN (x, i); j++)
3143	  elimination_effects (XVECEXP (x, i, j), mem_mode);
3144    }
3145}
3146
3147/* Descend through rtx X and verify that no references to eliminable registers
3148   remain.  If any do remain, mark the involved register as not
3149   eliminable.  */
3150
3151static void
3152check_eliminable_occurrences (rtx x)
3153{
3154  const char *fmt;
3155  int i;
3156  enum rtx_code code;
3157
3158  if (x == 0)
3159    return;
3160
3161  code = GET_CODE (x);
3162
3163  if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3164    {
3165      struct elim_table *ep;
3166
3167      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3168	if (ep->from_rtx == x)
3169	  ep->can_eliminate = 0;
3170      return;
3171    }
3172
3173  fmt = GET_RTX_FORMAT (code);
3174  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3175    {
3176      if (*fmt == 'e')
3177	check_eliminable_occurrences (XEXP (x, i));
3178      else if (*fmt == 'E')
3179	{
3180	  int j;
3181	  for (j = 0; j < XVECLEN (x, i); j++)
3182	    check_eliminable_occurrences (XVECEXP (x, i, j));
3183	}
3184    }
3185}
3186
3187/* Scan INSN and eliminate all eliminable registers in it.
3188
3189   If REPLACE is nonzero, do the replacement destructively.  Also
3190   delete the insn as dead it if it is setting an eliminable register.
3191
3192   If REPLACE is zero, do all our allocations in reload_obstack.
3193
3194   If no eliminations were done and this insn doesn't require any elimination
3195   processing (these are not identical conditions: it might be updating sp,
3196   but not referencing fp; this needs to be seen during reload_as_needed so
3197   that the offset between fp and sp can be taken into consideration), zero
3198   is returned.  Otherwise, 1 is returned.  */
3199
3200static int
3201eliminate_regs_in_insn (rtx insn, int replace)
3202{
3203  int icode = recog_memoized (insn);
3204  rtx old_body = PATTERN (insn);
3205  int insn_is_asm = asm_noperands (old_body) >= 0;
3206  rtx old_set = single_set (insn);
3207  rtx new_body;
3208  int val = 0;
3209  int i;
3210  rtx substed_operand[MAX_RECOG_OPERANDS];
3211  rtx orig_operand[MAX_RECOG_OPERANDS];
3212  struct elim_table *ep;
3213  rtx plus_src, plus_cst_src;
3214
3215  if (! insn_is_asm && icode < 0)
3216    {
3217      gcc_assert (GET_CODE (PATTERN (insn)) == USE
3218		  || GET_CODE (PATTERN (insn)) == CLOBBER
3219		  || GET_CODE (PATTERN (insn)) == ADDR_VEC
3220		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3221		  || GET_CODE (PATTERN (insn)) == ASM_INPUT
3222		  || DEBUG_INSN_P (insn));
3223      if (DEBUG_INSN_P (insn))
3224	INSN_VAR_LOCATION_LOC (insn)
3225	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3226      return 0;
3227    }
3228
3229  if (old_set != 0 && REG_P (SET_DEST (old_set))
3230      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3231    {
3232      /* Check for setting an eliminable register.  */
3233      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3234	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3235	  {
3236#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3237	    /* If this is setting the frame pointer register to the
3238	       hardware frame pointer register and this is an elimination
3239	       that will be done (tested above), this insn is really
3240	       adjusting the frame pointer downward to compensate for
3241	       the adjustment done before a nonlocal goto.  */
3242	    if (ep->from == FRAME_POINTER_REGNUM
3243		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3244	      {
3245		rtx base = SET_SRC (old_set);
3246		rtx base_insn = insn;
3247		HOST_WIDE_INT offset = 0;
3248
3249		while (base != ep->to_rtx)
3250		  {
3251		    rtx prev_insn, prev_set;
3252
3253		    if (GET_CODE (base) == PLUS
3254		        && CONST_INT_P (XEXP (base, 1)))
3255		      {
3256		        offset += INTVAL (XEXP (base, 1));
3257		        base = XEXP (base, 0);
3258		      }
3259		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3260			     && (prev_set = single_set (prev_insn)) != 0
3261			     && rtx_equal_p (SET_DEST (prev_set), base))
3262		      {
3263		        base = SET_SRC (prev_set);
3264		        base_insn = prev_insn;
3265		      }
3266		    else
3267		      break;
3268		  }
3269
3270		if (base == ep->to_rtx)
3271		  {
3272		    rtx src
3273		      = plus_constant (ep->to_rtx, offset - ep->offset);
3274
3275		    new_body = old_body;
3276		    if (! replace)
3277		      {
3278			new_body = copy_insn (old_body);
3279			if (REG_NOTES (insn))
3280			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3281		      }
3282		    PATTERN (insn) = new_body;
3283		    old_set = single_set (insn);
3284
3285		    /* First see if this insn remains valid when we
3286		       make the change.  If not, keep the INSN_CODE
3287		       the same and let reload fit it up.  */
3288		    validate_change (insn, &SET_SRC (old_set), src, 1);
3289		    validate_change (insn, &SET_DEST (old_set),
3290				     ep->to_rtx, 1);
3291		    if (! apply_change_group ())
3292		      {
3293			SET_SRC (old_set) = src;
3294			SET_DEST (old_set) = ep->to_rtx;
3295		      }
3296
3297		    val = 1;
3298		    goto done;
3299		  }
3300	      }
3301#endif
3302
3303	    /* In this case this insn isn't serving a useful purpose.  We
3304	       will delete it in reload_as_needed once we know that this
3305	       elimination is, in fact, being done.
3306
3307	       If REPLACE isn't set, we can't delete this insn, but needn't
3308	       process it since it won't be used unless something changes.  */
3309	    if (replace)
3310	      {
3311		delete_dead_insn (insn);
3312		return 1;
3313	      }
3314	    val = 1;
3315	    goto done;
3316	  }
3317    }
3318
3319  /* We allow one special case which happens to work on all machines we
3320     currently support: a single set with the source or a REG_EQUAL
3321     note being a PLUS of an eliminable register and a constant.  */
3322  plus_src = plus_cst_src = 0;
3323  if (old_set && REG_P (SET_DEST (old_set)))
3324    {
3325      if (GET_CODE (SET_SRC (old_set)) == PLUS)
3326	plus_src = SET_SRC (old_set);
3327      /* First see if the source is of the form (plus (...) CST).  */
3328      if (plus_src
3329	  && CONST_INT_P (XEXP (plus_src, 1)))
3330	plus_cst_src = plus_src;
3331      else if (REG_P (SET_SRC (old_set))
3332	       || plus_src)
3333	{
3334	  /* Otherwise, see if we have a REG_EQUAL note of the form
3335	     (plus (...) CST).  */
3336	  rtx links;
3337	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3338	    {
3339	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3340		   || REG_NOTE_KIND (links) == REG_EQUIV)
3341		  && GET_CODE (XEXP (links, 0)) == PLUS
3342		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3343		{
3344		  plus_cst_src = XEXP (links, 0);
3345		  break;
3346		}
3347	    }
3348	}
3349
3350      /* Check that the first operand of the PLUS is a hard reg or
3351	 the lowpart subreg of one.  */
3352      if (plus_cst_src)
3353	{
3354	  rtx reg = XEXP (plus_cst_src, 0);
3355	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3356	    reg = SUBREG_REG (reg);
3357
3358	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3359	    plus_cst_src = 0;
3360	}
3361    }
3362  if (plus_cst_src)
3363    {
3364      rtx reg = XEXP (plus_cst_src, 0);
3365      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3366
3367      if (GET_CODE (reg) == SUBREG)
3368	reg = SUBREG_REG (reg);
3369
3370      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3371	if (ep->from_rtx == reg && ep->can_eliminate)
3372	  {
3373	    rtx to_rtx = ep->to_rtx;
3374	    offset += ep->offset;
3375	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3376
3377	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3378	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3379				    to_rtx);
3380	    /* If we have a nonzero offset, and the source is already
3381	       a simple REG, the following transformation would
3382	       increase the cost of the insn by replacing a simple REG
3383	       with (plus (reg sp) CST).  So try only when we already
3384	       had a PLUS before.  */
3385	    if (offset == 0 || plus_src)
3386	      {
3387		rtx new_src = plus_constant (to_rtx, offset);
3388
3389		new_body = old_body;
3390		if (! replace)
3391		  {
3392		    new_body = copy_insn (old_body);
3393		    if (REG_NOTES (insn))
3394		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3395		  }
3396		PATTERN (insn) = new_body;
3397		old_set = single_set (insn);
3398
3399		/* First see if this insn remains valid when we make the
3400		   change.  If not, try to replace the whole pattern with
3401		   a simple set (this may help if the original insn was a
3402		   PARALLEL that was only recognized as single_set due to
3403		   REG_UNUSED notes).  If this isn't valid either, keep
3404		   the INSN_CODE the same and let reload fix it up.  */
3405		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3406		  {
3407		    rtx new_pat = gen_rtx_SET (VOIDmode,
3408					       SET_DEST (old_set), new_src);
3409
3410		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3411		      SET_SRC (old_set) = new_src;
3412		  }
3413	      }
3414	    else
3415	      break;
3416
3417	    val = 1;
3418	    /* This can't have an effect on elimination offsets, so skip right
3419	       to the end.  */
3420	    goto done;
3421	  }
3422    }
3423
3424  /* Determine the effects of this insn on elimination offsets.  */
3425  elimination_effects (old_body, VOIDmode);
3426
3427  /* Eliminate all eliminable registers occurring in operands that
3428     can be handled by reload.  */
3429  extract_insn (insn);
3430  for (i = 0; i < recog_data.n_operands; i++)
3431    {
3432      orig_operand[i] = recog_data.operand[i];
3433      substed_operand[i] = recog_data.operand[i];
3434
3435      /* For an asm statement, every operand is eliminable.  */
3436      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3437	{
3438	  bool is_set_src, in_plus;
3439
3440	  /* Check for setting a register that we know about.  */
3441	  if (recog_data.operand_type[i] != OP_IN
3442	      && REG_P (orig_operand[i]))
3443	    {
3444	      /* If we are assigning to a register that can be eliminated, it
3445		 must be as part of a PARALLEL, since the code above handles
3446		 single SETs.  We must indicate that we can no longer
3447		 eliminate this reg.  */
3448	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3449		   ep++)
3450		if (ep->from_rtx == orig_operand[i])
3451		  ep->can_eliminate = 0;
3452	    }
3453
3454	  /* Companion to the above plus substitution, we can allow
3455	     invariants as the source of a plain move.  */
3456	  is_set_src = false;
3457	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3458	    is_set_src = true;
3459	  in_plus = false;
3460	  if (plus_src
3461	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3462		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3463	    in_plus = true;
3464
3465	  substed_operand[i]
3466	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3467			        replace ? insn : NULL_RTX,
3468				is_set_src || in_plus);
3469	  if (substed_operand[i] != orig_operand[i])
3470	    val = 1;
3471	  /* Terminate the search in check_eliminable_occurrences at
3472	     this point.  */
3473	  *recog_data.operand_loc[i] = 0;
3474
3475	  /* If an output operand changed from a REG to a MEM and INSN is an
3476	     insn, write a CLOBBER insn.  */
3477	  if (recog_data.operand_type[i] != OP_IN
3478	      && REG_P (orig_operand[i])
3479	      && MEM_P (substed_operand[i])
3480	      && replace)
3481	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3482	}
3483    }
3484
3485  for (i = 0; i < recog_data.n_dups; i++)
3486    *recog_data.dup_loc[i]
3487      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3488
3489  /* If any eliminable remain, they aren't eliminable anymore.  */
3490  check_eliminable_occurrences (old_body);
3491
3492  /* Substitute the operands; the new values are in the substed_operand
3493     array.  */
3494  for (i = 0; i < recog_data.n_operands; i++)
3495    *recog_data.operand_loc[i] = substed_operand[i];
3496  for (i = 0; i < recog_data.n_dups; i++)
3497    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3498
3499  /* If we are replacing a body that was a (set X (plus Y Z)), try to
3500     re-recognize the insn.  We do this in case we had a simple addition
3501     but now can do this as a load-address.  This saves an insn in this
3502     common case.
3503     If re-recognition fails, the old insn code number will still be used,
3504     and some register operands may have changed into PLUS expressions.
3505     These will be handled by find_reloads by loading them into a register
3506     again.  */
3507
3508  if (val)
3509    {
3510      /* If we aren't replacing things permanently and we changed something,
3511	 make another copy to ensure that all the RTL is new.  Otherwise
3512	 things can go wrong if find_reload swaps commutative operands
3513	 and one is inside RTL that has been copied while the other is not.  */
3514      new_body = old_body;
3515      if (! replace)
3516	{
3517	  new_body = copy_insn (old_body);
3518	  if (REG_NOTES (insn))
3519	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3520	}
3521      PATTERN (insn) = new_body;
3522
3523      /* If we had a move insn but now we don't, rerecognize it.  This will
3524	 cause spurious re-recognition if the old move had a PARALLEL since
3525	 the new one still will, but we can't call single_set without
3526	 having put NEW_BODY into the insn and the re-recognition won't
3527	 hurt in this rare case.  */
3528      /* ??? Why this huge if statement - why don't we just rerecognize the
3529	 thing always?  */
3530      if (! insn_is_asm
3531	  && old_set != 0
3532	  && ((REG_P (SET_SRC (old_set))
3533	       && (GET_CODE (new_body) != SET
3534		   || !REG_P (SET_SRC (new_body))))
3535	      /* If this was a load from or store to memory, compare
3536		 the MEM in recog_data.operand to the one in the insn.
3537		 If they are not equal, then rerecognize the insn.  */
3538	      || (old_set != 0
3539		  && ((MEM_P (SET_SRC (old_set))
3540		       && SET_SRC (old_set) != recog_data.operand[1])
3541		      || (MEM_P (SET_DEST (old_set))
3542			  && SET_DEST (old_set) != recog_data.operand[0])))
3543	      /* If this was an add insn before, rerecognize.  */
3544	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3545	{
3546	  int new_icode = recog (PATTERN (insn), insn, 0);
3547	  if (new_icode >= 0)
3548	    INSN_CODE (insn) = new_icode;
3549	}
3550    }
3551
3552  /* Restore the old body.  If there were any changes to it, we made a copy
3553     of it while the changes were still in place, so we'll correctly return
3554     a modified insn below.  */
3555  if (! replace)
3556    {
3557      /* Restore the old body.  */
3558      for (i = 0; i < recog_data.n_operands; i++)
3559	*recog_data.operand_loc[i] = orig_operand[i];
3560      for (i = 0; i < recog_data.n_dups; i++)
3561	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3562    }
3563
3564  /* Update all elimination pairs to reflect the status after the current
3565     insn.  The changes we make were determined by the earlier call to
3566     elimination_effects.
3567
3568     We also detect cases where register elimination cannot be done,
3569     namely, if a register would be both changed and referenced outside a MEM
3570     in the resulting insn since such an insn is often undefined and, even if
3571     not, we cannot know what meaning will be given to it.  Note that it is
3572     valid to have a register used in an address in an insn that changes it
3573     (presumably with a pre- or post-increment or decrement).
3574
3575     If anything changes, return nonzero.  */
3576
3577  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3578    {
3579      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3580	ep->can_eliminate = 0;
3581
3582      ep->ref_outside_mem = 0;
3583
3584      if (ep->previous_offset != ep->offset)
3585	val = 1;
3586    }
3587
3588 done:
3589  /* If we changed something, perform elimination in REG_NOTES.  This is
3590     needed even when REPLACE is zero because a REG_DEAD note might refer
3591     to a register that we eliminate and could cause a different number
3592     of spill registers to be needed in the final reload pass than in
3593     the pre-passes.  */
3594  if (val && REG_NOTES (insn) != 0)
3595    REG_NOTES (insn)
3596      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true);
3597
3598  return val;
3599}
3600
3601/* Loop through all elimination pairs.
3602   Recalculate the number not at initial offset.
3603
3604   Compute the maximum offset (minimum offset if the stack does not
3605   grow downward) for each elimination pair.  */
3606
3607static void
3608update_eliminable_offsets (void)
3609{
3610  struct elim_table *ep;
3611
3612  num_not_at_initial_offset = 0;
3613  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3614    {
3615      ep->previous_offset = ep->offset;
3616      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3617	num_not_at_initial_offset++;
3618    }
3619}
3620
3621/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3622   replacement we currently believe is valid, mark it as not eliminable if X
3623   modifies DEST in any way other than by adding a constant integer to it.
3624
3625   If DEST is the frame pointer, we do nothing because we assume that
3626   all assignments to the hard frame pointer are nonlocal gotos and are being
3627   done at a time when they are valid and do not disturb anything else.
3628   Some machines want to eliminate a fake argument pointer with either the
3629   frame or stack pointer.  Assignments to the hard frame pointer must not
3630   prevent this elimination.
3631
3632   Called via note_stores from reload before starting its passes to scan
3633   the insns of the function.  */
3634
3635static void
3636mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3637{
3638  unsigned int i;
3639
3640  /* A SUBREG of a hard register here is just changing its mode.  We should
3641     not see a SUBREG of an eliminable hard register, but check just in
3642     case.  */
3643  if (GET_CODE (dest) == SUBREG)
3644    dest = SUBREG_REG (dest);
3645
3646  if (dest == hard_frame_pointer_rtx)
3647    return;
3648
3649  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3650    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3651	&& (GET_CODE (x) != SET
3652	    || GET_CODE (SET_SRC (x)) != PLUS
3653	    || XEXP (SET_SRC (x), 0) != dest
3654	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3655      {
3656	reg_eliminate[i].can_eliminate_previous
3657	  = reg_eliminate[i].can_eliminate = 0;
3658	num_eliminable--;
3659      }
3660}
3661
3662/* Verify that the initial elimination offsets did not change since the
3663   last call to set_initial_elim_offsets.  This is used to catch cases
3664   where something illegal happened during reload_as_needed that could
3665   cause incorrect code to be generated if we did not check for it.  */
3666
3667static bool
3668verify_initial_elim_offsets (void)
3669{
3670  HOST_WIDE_INT t;
3671
3672  if (!num_eliminable)
3673    return true;
3674
3675#ifdef ELIMINABLE_REGS
3676  {
3677   struct elim_table *ep;
3678
3679   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3680     {
3681       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3682       if (t != ep->initial_offset)
3683	 return false;
3684     }
3685  }
3686#else
3687  INITIAL_FRAME_POINTER_OFFSET (t);
3688  if (t != reg_eliminate[0].initial_offset)
3689    return false;
3690#endif
3691
3692  return true;
3693}
3694
3695/* Reset all offsets on eliminable registers to their initial values.  */
3696
3697static void
3698set_initial_elim_offsets (void)
3699{
3700  struct elim_table *ep = reg_eliminate;
3701
3702#ifdef ELIMINABLE_REGS
3703  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3704    {
3705      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3706      ep->previous_offset = ep->offset = ep->initial_offset;
3707    }
3708#else
3709  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3710  ep->previous_offset = ep->offset = ep->initial_offset;
3711#endif
3712
3713  num_not_at_initial_offset = 0;
3714}
3715
3716/* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3717
3718static void
3719set_initial_eh_label_offset (rtx label)
3720{
3721  set_label_offsets (label, NULL_RTX, 1);
3722}
3723
3724/* Initialize the known label offsets.
3725   Set a known offset for each forced label to be at the initial offset
3726   of each elimination.  We do this because we assume that all
3727   computed jumps occur from a location where each elimination is
3728   at its initial offset.
3729   For all other labels, show that we don't know the offsets.  */
3730
3731static void
3732set_initial_label_offsets (void)
3733{
3734  rtx x;
3735  memset (offsets_known_at, 0, num_labels);
3736
3737  for (x = forced_labels; x; x = XEXP (x, 1))
3738    if (XEXP (x, 0))
3739      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3740
3741  for_each_eh_label (set_initial_eh_label_offset);
3742}
3743
3744/* Set all elimination offsets to the known values for the code label given
3745   by INSN.  */
3746
3747static void
3748set_offsets_for_label (rtx insn)
3749{
3750  unsigned int i;
3751  int label_nr = CODE_LABEL_NUMBER (insn);
3752  struct elim_table *ep;
3753
3754  num_not_at_initial_offset = 0;
3755  for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3756    {
3757      ep->offset = ep->previous_offset
3758		 = offsets_at[label_nr - first_label_num][i];
3759      if (ep->can_eliminate && ep->offset != ep->initial_offset)
3760	num_not_at_initial_offset++;
3761    }
3762}
3763
3764/* See if anything that happened changes which eliminations are valid.
3765   For example, on the SPARC, whether or not the frame pointer can
3766   be eliminated can depend on what registers have been used.  We need
3767   not check some conditions again (such as flag_omit_frame_pointer)
3768   since they can't have changed.  */
3769
3770static void
3771update_eliminables (HARD_REG_SET *pset)
3772{
3773  int previous_frame_pointer_needed = frame_pointer_needed;
3774  struct elim_table *ep;
3775
3776  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3777    if ((ep->from == HARD_FRAME_POINTER_REGNUM
3778         && targetm.frame_pointer_required ())
3779#ifdef ELIMINABLE_REGS
3780	|| ! targetm.can_eliminate (ep->from, ep->to)
3781#endif
3782	)
3783      ep->can_eliminate = 0;
3784
3785  /* Look for the case where we have discovered that we can't replace
3786     register A with register B and that means that we will now be
3787     trying to replace register A with register C.  This means we can
3788     no longer replace register C with register B and we need to disable
3789     such an elimination, if it exists.  This occurs often with A == ap,
3790     B == sp, and C == fp.  */
3791
3792  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3793    {
3794      struct elim_table *op;
3795      int new_to = -1;
3796
3797      if (! ep->can_eliminate && ep->can_eliminate_previous)
3798	{
3799	  /* Find the current elimination for ep->from, if there is a
3800	     new one.  */
3801	  for (op = reg_eliminate;
3802	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3803	    if (op->from == ep->from && op->can_eliminate)
3804	      {
3805		new_to = op->to;
3806		break;
3807	      }
3808
3809	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3810	     disable it.  */
3811	  for (op = reg_eliminate;
3812	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3813	    if (op->from == new_to && op->to == ep->to)
3814	      op->can_eliminate = 0;
3815	}
3816    }
3817
3818  /* See if any registers that we thought we could eliminate the previous
3819     time are no longer eliminable.  If so, something has changed and we
3820     must spill the register.  Also, recompute the number of eliminable
3821     registers and see if the frame pointer is needed; it is if there is
3822     no elimination of the frame pointer that we can perform.  */
3823
3824  frame_pointer_needed = 1;
3825  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3826    {
3827      if (ep->can_eliminate
3828	  && ep->from == FRAME_POINTER_REGNUM
3829	  && ep->to != HARD_FRAME_POINTER_REGNUM
3830	  && (! SUPPORTS_STACK_ALIGNMENT
3831	      || ! crtl->stack_realign_needed))
3832	frame_pointer_needed = 0;
3833
3834      if (! ep->can_eliminate && ep->can_eliminate_previous)
3835	{
3836	  ep->can_eliminate_previous = 0;
3837	  SET_HARD_REG_BIT (*pset, ep->from);
3838	  num_eliminable--;
3839	}
3840    }
3841
3842  /* If we didn't need a frame pointer last time, but we do now, spill
3843     the hard frame pointer.  */
3844  if (frame_pointer_needed && ! previous_frame_pointer_needed)
3845    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3846}
3847
3848/* Return true if X is used as the target register of an elimination.  */
3849
3850bool
3851elimination_target_reg_p (rtx x)
3852{
3853  struct elim_table *ep;
3854
3855  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3856    if (ep->to_rtx == x && ep->can_eliminate)
3857      return true;
3858
3859  return false;
3860}
3861
3862/* Initialize the table of registers to eliminate.
3863   Pre-condition: global flag frame_pointer_needed has been set before
3864   calling this function.  */
3865
3866static void
3867init_elim_table (void)
3868{
3869  struct elim_table *ep;
3870#ifdef ELIMINABLE_REGS
3871  const struct elim_table_1 *ep1;
3872#endif
3873
3874  if (!reg_eliminate)
3875    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
3876
3877  num_eliminable = 0;
3878
3879#ifdef ELIMINABLE_REGS
3880  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3881       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3882    {
3883      ep->from = ep1->from;
3884      ep->to = ep1->to;
3885      ep->can_eliminate = ep->can_eliminate_previous
3886	= (targetm.can_eliminate (ep->from, ep->to)
3887	   && ! (ep->to == STACK_POINTER_REGNUM
3888		 && frame_pointer_needed
3889		 && (! SUPPORTS_STACK_ALIGNMENT
3890		     || ! stack_realign_fp)));
3891    }
3892#else
3893  reg_eliminate[0].from = reg_eliminate_1[0].from;
3894  reg_eliminate[0].to = reg_eliminate_1[0].to;
3895  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3896    = ! frame_pointer_needed;
3897#endif
3898
3899  /* Count the number of eliminable registers and build the FROM and TO
3900     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
3901     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3902     We depend on this.  */
3903  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3904    {
3905      num_eliminable += ep->can_eliminate;
3906      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3907      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3908    }
3909}
3910
3911/* Kick all pseudos out of hard register REGNO.
3912
3913   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3914   because we found we can't eliminate some register.  In the case, no pseudos
3915   are allowed to be in the register, even if they are only in a block that
3916   doesn't require spill registers, unlike the case when we are spilling this
3917   hard reg to produce another spill register.
3918
3919   Return nonzero if any pseudos needed to be kicked out.  */
3920
3921static void
3922spill_hard_reg (unsigned int regno, int cant_eliminate)
3923{
3924  int i;
3925
3926  if (cant_eliminate)
3927    {
3928      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3929      df_set_regs_ever_live (regno, true);
3930    }
3931
3932  /* Spill every pseudo reg that was allocated to this reg
3933     or to something that overlaps this reg.  */
3934
3935  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3936    if (reg_renumber[i] >= 0
3937	&& (unsigned int) reg_renumber[i] <= regno
3938	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
3939      SET_REGNO_REG_SET (&spilled_pseudos, i);
3940}
3941
3942/* After find_reload_regs has been run for all insn that need reloads,
3943   and/or spill_hard_regs was called, this function is used to actually
3944   spill pseudo registers and try to reallocate them.  It also sets up the
3945   spill_regs array for use by choose_reload_regs.  */
3946
3947static int
3948finish_spills (int global)
3949{
3950  struct insn_chain *chain;
3951  int something_changed = 0;
3952  unsigned i;
3953  reg_set_iterator rsi;
3954
3955  /* Build the spill_regs array for the function.  */
3956  /* If there are some registers still to eliminate and one of the spill regs
3957     wasn't ever used before, additional stack space may have to be
3958     allocated to store this register.  Thus, we may have changed the offset
3959     between the stack and frame pointers, so mark that something has changed.
3960
3961     One might think that we need only set VAL to 1 if this is a call-used
3962     register.  However, the set of registers that must be saved by the
3963     prologue is not identical to the call-used set.  For example, the
3964     register used by the call insn for the return PC is a call-used register,
3965     but must be saved by the prologue.  */
3966
3967  n_spills = 0;
3968  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3969    if (TEST_HARD_REG_BIT (used_spill_regs, i))
3970      {
3971	spill_reg_order[i] = n_spills;
3972	spill_regs[n_spills++] = i;
3973	if (num_eliminable && ! df_regs_ever_live_p (i))
3974	  something_changed = 1;
3975	df_set_regs_ever_live (i, true);
3976      }
3977    else
3978      spill_reg_order[i] = -1;
3979
3980  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
3981    if (! ira_conflicts_p || reg_renumber[i] >= 0)
3982      {
3983	/* Record the current hard register the pseudo is allocated to
3984	   in pseudo_previous_regs so we avoid reallocating it to the
3985	   same hard reg in a later pass.  */
3986	gcc_assert (reg_renumber[i] >= 0);
3987
3988	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3989	/* Mark it as no longer having a hard register home.  */
3990	reg_renumber[i] = -1;
3991	if (ira_conflicts_p)
3992	  /* Inform IRA about the change.  */
3993	  ira_mark_allocation_change (i);
3994	/* We will need to scan everything again.  */
3995	something_changed = 1;
3996      }
3997
3998  /* Retry global register allocation if possible.  */
3999  if (global && ira_conflicts_p)
4000    {
4001      unsigned int n;
4002
4003      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4004      /* For every insn that needs reloads, set the registers used as spill
4005	 regs in pseudo_forbidden_regs for every pseudo live across the
4006	 insn.  */
4007      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4008	{
4009	  EXECUTE_IF_SET_IN_REG_SET
4010	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4011	    {
4012	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4013				chain->used_spill_regs);
4014	    }
4015	  EXECUTE_IF_SET_IN_REG_SET
4016	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4017	    {
4018	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4019				chain->used_spill_regs);
4020	    }
4021	}
4022
4023      /* Retry allocating the pseudos spilled in IRA and the
4024	 reload.  For each reg, merge the various reg sets that
4025	 indicate which hard regs can't be used, and call
4026	 ira_reassign_pseudos.  */
4027      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4028	if (reg_old_renumber[i] != reg_renumber[i])
4029	  {
4030	    if (reg_renumber[i] < 0)
4031	      temp_pseudo_reg_arr[n++] = i;
4032	    else
4033	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4034	  }
4035      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4036				bad_spill_regs_global,
4037				pseudo_forbidden_regs, pseudo_previous_regs,
4038				&spilled_pseudos))
4039	something_changed = 1;
4040    }
4041  /* Fix up the register information in the insn chain.
4042     This involves deleting those of the spilled pseudos which did not get
4043     a new hard register home from the live_{before,after} sets.  */
4044  for (chain = reload_insn_chain; chain; chain = chain->next)
4045    {
4046      HARD_REG_SET used_by_pseudos;
4047      HARD_REG_SET used_by_pseudos2;
4048
4049      if (! ira_conflicts_p)
4050	{
4051	  /* Don't do it for IRA because IRA and the reload still can
4052	     assign hard registers to the spilled pseudos on next
4053	     reload iterations.  */
4054	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4055	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4056	}
4057      /* Mark any unallocated hard regs as available for spills.  That
4058	 makes inheritance work somewhat better.  */
4059      if (chain->need_reload)
4060	{
4061	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4062	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4063	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4064
4065	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4066	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4067	  /* Value of chain->used_spill_regs from previous iteration
4068	     may be not included in the value calculated here because
4069	     of possible removing caller-saves insns (see function
4070	     delete_caller_save_insns.  */
4071	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4072	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4073	}
4074    }
4075
4076  CLEAR_REG_SET (&changed_allocation_pseudos);
4077  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
4078  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4079    {
4080      int regno = reg_renumber[i];
4081      if (reg_old_renumber[i] == regno)
4082	continue;
4083
4084      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4085
4086      alter_reg (i, reg_old_renumber[i], false);
4087      reg_old_renumber[i] = regno;
4088      if (dump_file)
4089	{
4090	  if (regno == -1)
4091	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
4092	  else
4093	    fprintf (dump_file, " Register %d now in %d.\n\n",
4094		     i, reg_renumber[i]);
4095	}
4096    }
4097
4098  return something_changed;
4099}
4100
4101/* Find all paradoxical subregs within X and update reg_max_ref_width.  */
4102
4103static void
4104scan_paradoxical_subregs (rtx x)
4105{
4106  int i;
4107  const char *fmt;
4108  enum rtx_code code = GET_CODE (x);
4109
4110  switch (code)
4111    {
4112    case REG:
4113    case CONST_INT:
4114    case CONST:
4115    case SYMBOL_REF:
4116    case LABEL_REF:
4117    case CONST_DOUBLE:
4118    case CONST_FIXED:
4119    case CONST_VECTOR: /* shouldn't happen, but just in case.  */
4120    case CC0:
4121    case PC:
4122    case USE:
4123    case CLOBBER:
4124      return;
4125
4126    case SUBREG:
4127      if (REG_P (SUBREG_REG (x))
4128	  && (GET_MODE_SIZE (GET_MODE (x))
4129	      > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4130	{
4131	  reg_max_ref_width[REGNO (SUBREG_REG (x))]
4132	    = GET_MODE_SIZE (GET_MODE (x));
4133	  mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4134	}
4135      return;
4136
4137    default:
4138      break;
4139    }
4140
4141  fmt = GET_RTX_FORMAT (code);
4142  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4143    {
4144      if (fmt[i] == 'e')
4145	scan_paradoxical_subregs (XEXP (x, i));
4146      else if (fmt[i] == 'E')
4147	{
4148	  int j;
4149	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4150	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4151	}
4152    }
4153}
4154
4155/* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4156   examine all of the reload insns between PREV and NEXT exclusive, and
4157   annotate all that may trap.  */
4158
4159static void
4160fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4161{
4162  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4163  if (note == NULL)
4164    return;
4165  if (!insn_could_throw_p (insn))
4166    remove_note (insn, note);
4167  copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4168}
4169
4170/* Reload pseudo-registers into hard regs around each insn as needed.
4171   Additional register load insns are output before the insn that needs it
4172   and perhaps store insns after insns that modify the reloaded pseudo reg.
4173
4174   reg_last_reload_reg and reg_reloaded_contents keep track of
4175   which registers are already available in reload registers.
4176   We update these for the reloads that we perform,
4177   as the insns are scanned.  */
4178
4179static void
4180reload_as_needed (int live_known)
4181{
4182  struct insn_chain *chain;
4183#if defined (AUTO_INC_DEC)
4184  int i;
4185#endif
4186  rtx x;
4187
4188  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4189  memset (spill_reg_store, 0, sizeof spill_reg_store);
4190  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4191  INIT_REG_SET (&reg_has_output_reload);
4192  CLEAR_HARD_REG_SET (reg_reloaded_valid);
4193  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4194
4195  set_initial_elim_offsets ();
4196
4197  for (chain = reload_insn_chain; chain; chain = chain->next)
4198    {
4199      rtx prev = 0;
4200      rtx insn = chain->insn;
4201      rtx old_next = NEXT_INSN (insn);
4202#ifdef AUTO_INC_DEC
4203      rtx old_prev = PREV_INSN (insn);
4204#endif
4205
4206      /* If we pass a label, copy the offsets from the label information
4207	 into the current offsets of each elimination.  */
4208      if (LABEL_P (insn))
4209	set_offsets_for_label (insn);
4210
4211      else if (INSN_P (insn))
4212	{
4213	  regset_head regs_to_forget;
4214	  INIT_REG_SET (&regs_to_forget);
4215	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4216
4217	  /* If this is a USE and CLOBBER of a MEM, ensure that any
4218	     references to eliminable registers have been removed.  */
4219
4220	  if ((GET_CODE (PATTERN (insn)) == USE
4221	       || GET_CODE (PATTERN (insn)) == CLOBBER)
4222	      && MEM_P (XEXP (PATTERN (insn), 0)))
4223	    XEXP (XEXP (PATTERN (insn), 0), 0)
4224	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4225				GET_MODE (XEXP (PATTERN (insn), 0)),
4226				NULL_RTX);
4227
4228	  /* If we need to do register elimination processing, do so.
4229	     This might delete the insn, in which case we are done.  */
4230	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4231	    {
4232	      eliminate_regs_in_insn (insn, 1);
4233	      if (NOTE_P (insn))
4234		{
4235		  update_eliminable_offsets ();
4236		  CLEAR_REG_SET (&regs_to_forget);
4237		  continue;
4238		}
4239	    }
4240
4241	  /* If need_elim is nonzero but need_reload is zero, one might think
4242	     that we could simply set n_reloads to 0.  However, find_reloads
4243	     could have done some manipulation of the insn (such as swapping
4244	     commutative operands), and these manipulations are lost during
4245	     the first pass for every insn that needs register elimination.
4246	     So the actions of find_reloads must be redone here.  */
4247
4248	  if (! chain->need_elim && ! chain->need_reload
4249	      && ! chain->need_operand_change)
4250	    n_reloads = 0;
4251	  /* First find the pseudo regs that must be reloaded for this insn.
4252	     This info is returned in the tables reload_... (see reload.h).
4253	     Also modify the body of INSN by substituting RELOAD
4254	     rtx's for those pseudo regs.  */
4255	  else
4256	    {
4257	      CLEAR_REG_SET (&reg_has_output_reload);
4258	      CLEAR_HARD_REG_SET (reg_is_output_reload);
4259
4260	      find_reloads (insn, 1, spill_indirect_levels, live_known,
4261			    spill_reg_order);
4262	    }
4263
4264	  if (n_reloads > 0)
4265	    {
4266	      rtx next = NEXT_INSN (insn);
4267	      rtx p;
4268
4269	      prev = PREV_INSN (insn);
4270
4271	      /* Now compute which reload regs to reload them into.  Perhaps
4272		 reusing reload regs from previous insns, or else output
4273		 load insns to reload them.  Maybe output store insns too.
4274		 Record the choices of reload reg in reload_reg_rtx.  */
4275	      choose_reload_regs (chain);
4276
4277	      /* Generate the insns to reload operands into or out of
4278		 their reload regs.  */
4279	      emit_reload_insns (chain);
4280
4281	      /* Substitute the chosen reload regs from reload_reg_rtx
4282		 into the insn's body (or perhaps into the bodies of other
4283		 load and store insn that we just made for reloading
4284		 and that we moved the structure into).  */
4285	      subst_reloads (insn);
4286
4287	      /* Adjust the exception region notes for loads and stores.  */
4288	      if (flag_non_call_exceptions && !CALL_P (insn))
4289		fixup_eh_region_note (insn, prev, next);
4290
4291	      /* If this was an ASM, make sure that all the reload insns
4292		 we have generated are valid.  If not, give an error
4293		 and delete them.  */
4294	      if (asm_noperands (PATTERN (insn)) >= 0)
4295		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4296		  if (p != insn && INSN_P (p)
4297		      && GET_CODE (PATTERN (p)) != USE
4298		      && (recog_memoized (p) < 0
4299			  || (extract_insn (p), ! constrain_operands (1))))
4300		    {
4301		      error_for_asm (insn,
4302				     "%<asm%> operand requires "
4303				     "impossible reload");
4304		      delete_insn (p);
4305		    }
4306	    }
4307
4308	  if (num_eliminable && chain->need_elim)
4309	    update_eliminable_offsets ();
4310
4311	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
4312	     is no longer validly lying around to save a future reload.
4313	     Note that this does not detect pseudos that were reloaded
4314	     for this insn in order to be stored in
4315	     (obeying register constraints).  That is correct; such reload
4316	     registers ARE still valid.  */
4317	  forget_marked_reloads (&regs_to_forget);
4318	  CLEAR_REG_SET (&regs_to_forget);
4319
4320	  /* There may have been CLOBBER insns placed after INSN.  So scan
4321	     between INSN and NEXT and use them to forget old reloads.  */
4322	  for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4323	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4324	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4325
4326#ifdef AUTO_INC_DEC
4327	  /* Likewise for regs altered by auto-increment in this insn.
4328	     REG_INC notes have been changed by reloading:
4329	     find_reloads_address_1 records substitutions for them,
4330	     which have been performed by subst_reloads above.  */
4331	  for (i = n_reloads - 1; i >= 0; i--)
4332	    {
4333	      rtx in_reg = rld[i].in_reg;
4334	      if (in_reg)
4335		{
4336		  enum rtx_code code = GET_CODE (in_reg);
4337		  /* PRE_INC / PRE_DEC will have the reload register ending up
4338		     with the same value as the stack slot, but that doesn't
4339		     hold true for POST_INC / POST_DEC.  Either we have to
4340		     convert the memory access to a true POST_INC / POST_DEC,
4341		     or we can't use the reload register for inheritance.  */
4342		  if ((code == POST_INC || code == POST_DEC)
4343		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4344					    REGNO (rld[i].reg_rtx))
4345		      /* Make sure it is the inc/dec pseudo, and not
4346			 some other (e.g. output operand) pseudo.  */
4347		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4348			  == REGNO (XEXP (in_reg, 0))))
4349
4350		    {
4351		      rtx reload_reg = rld[i].reg_rtx;
4352		      enum machine_mode mode = GET_MODE (reload_reg);
4353		      int n = 0;
4354		      rtx p;
4355
4356		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4357			{
4358			  /* We really want to ignore REG_INC notes here, so
4359			     use PATTERN (p) as argument to reg_set_p .  */
4360			  if (reg_set_p (reload_reg, PATTERN (p)))
4361			    break;
4362			  n = count_occurrences (PATTERN (p), reload_reg, 0);
4363			  if (! n)
4364			    continue;
4365			  if (n == 1)
4366			    {
4367			      rtx replace_reg
4368				= gen_rtx_fmt_e (code, mode, reload_reg);
4369
4370			      validate_replace_rtx_group (reload_reg,
4371							  replace_reg, p);
4372			      n = verify_changes (0);
4373
4374			      /* We must also verify that the constraints
4375				 are met after the replacement.  Make sure
4376				 extract_insn is only called for an insn
4377				 where the replacements were found to be
4378				 valid so far. */
4379			      if (n)
4380				{
4381				  extract_insn (p);
4382				  n = constrain_operands (1);
4383				}
4384
4385			      /* If the constraints were not met, then
4386				 undo the replacement, else confirm it.  */
4387			      if (!n)
4388				cancel_changes (0);
4389			      else
4390				confirm_change_group ();
4391			    }
4392			  break;
4393			}
4394		      if (n == 1)
4395			{
4396			  add_reg_note (p, REG_INC, reload_reg);
4397			  /* Mark this as having an output reload so that the
4398			     REG_INC processing code below won't invalidate
4399			     the reload for inheritance.  */
4400			  SET_HARD_REG_BIT (reg_is_output_reload,
4401					    REGNO (reload_reg));
4402			  SET_REGNO_REG_SET (&reg_has_output_reload,
4403					     REGNO (XEXP (in_reg, 0)));
4404			}
4405		      else
4406			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4407					      NULL);
4408		    }
4409		  else if ((code == PRE_INC || code == PRE_DEC)
4410			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
4411						 REGNO (rld[i].reg_rtx))
4412			   /* Make sure it is the inc/dec pseudo, and not
4413			      some other (e.g. output operand) pseudo.  */
4414			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4415			       == REGNO (XEXP (in_reg, 0))))
4416		    {
4417		      SET_HARD_REG_BIT (reg_is_output_reload,
4418					REGNO (rld[i].reg_rtx));
4419		      SET_REGNO_REG_SET (&reg_has_output_reload,
4420					 REGNO (XEXP (in_reg, 0)));
4421		    }
4422		  else if (code == PRE_INC || code == PRE_DEC
4423			   || code == POST_INC || code == POST_DEC)
4424		    {
4425		      int in_regno = REGNO (XEXP (in_reg, 0));
4426
4427		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
4428			{
4429			  int in_hard_regno;
4430			  bool forget_p = true;
4431
4432			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4433			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4434						 in_hard_regno))
4435			    {
4436			      for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4437				   x != old_next;
4438				   x = NEXT_INSN (x))
4439				if (x == reg_reloaded_insn[in_hard_regno])
4440				  {
4441				    forget_p = false;
4442				    break;
4443				  }
4444			    }
4445			  /* If for some reasons, we didn't set up
4446			     reg_last_reload_reg in this insn,
4447			     invalidate inheritance from previous
4448			     insns for the incremented/decremented
4449			     register.  Such registers will be not in
4450			     reg_has_output_reload.  Invalidate it
4451			     also if the corresponding element in
4452			     reg_reloaded_insn is also
4453			     invalidated.  */
4454			  if (forget_p)
4455			    forget_old_reloads_1 (XEXP (in_reg, 0),
4456						  NULL_RTX, NULL);
4457			}
4458		    }
4459		}
4460	    }
4461	  /* If a pseudo that got a hard register is auto-incremented,
4462	     we must purge records of copying it into pseudos without
4463	     hard registers.  */
4464	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4465	    if (REG_NOTE_KIND (x) == REG_INC)
4466	      {
4467		/* See if this pseudo reg was reloaded in this insn.
4468		   If so, its last-reload info is still valid
4469		   because it is based on this insn's reload.  */
4470		for (i = 0; i < n_reloads; i++)
4471		  if (rld[i].out == XEXP (x, 0))
4472		    break;
4473
4474		if (i == n_reloads)
4475		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4476	      }
4477#endif
4478	}
4479      /* A reload reg's contents are unknown after a label.  */
4480      if (LABEL_P (insn))
4481	CLEAR_HARD_REG_SET (reg_reloaded_valid);
4482
4483      /* Don't assume a reload reg is still good after a call insn
4484	 if it is a call-used reg, or if it contains a value that will
4485         be partially clobbered by the call.  */
4486      else if (CALL_P (insn))
4487	{
4488	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4489	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4490
4491	  /* If this is a call to a setjmp-type function, we must not
4492	     reuse any reload reg contents across the call; that will
4493	     just be clobbered by other uses of the register in later
4494	     code, before the longjmp.  */
4495	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4496	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
4497	}
4498    }
4499
4500  /* Clean up.  */
4501  free (reg_last_reload_reg);
4502  CLEAR_REG_SET (&reg_has_output_reload);
4503}
4504
4505/* Discard all record of any value reloaded from X,
4506   or reloaded in X from someplace else;
4507   unless X is an output reload reg of the current insn.
4508
4509   X may be a hard reg (the reload reg)
4510   or it may be a pseudo reg that was reloaded from.
4511
4512   When DATA is non-NULL just mark the registers in regset
4513   to be forgotten later.  */
4514
4515static void
4516forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4517		      void *data)
4518{
4519  unsigned int regno;
4520  unsigned int nr;
4521  regset regs = (regset) data;
4522
4523  /* note_stores does give us subregs of hard regs,
4524     subreg_regno_offset requires a hard reg.  */
4525  while (GET_CODE (x) == SUBREG)
4526    {
4527      /* We ignore the subreg offset when calculating the regno,
4528	 because we are using the entire underlying hard register
4529	 below.  */
4530      x = SUBREG_REG (x);
4531    }
4532
4533  if (!REG_P (x))
4534    return;
4535
4536  regno = REGNO (x);
4537
4538  if (regno >= FIRST_PSEUDO_REGISTER)
4539    nr = 1;
4540  else
4541    {
4542      unsigned int i;
4543
4544      nr = hard_regno_nregs[regno][GET_MODE (x)];
4545      /* Storing into a spilled-reg invalidates its contents.
4546	 This can happen if a block-local pseudo is allocated to that reg
4547	 and it wasn't spilled because this block's total need is 0.
4548	 Then some insn might have an optional reload and use this reg.  */
4549      if (!regs)
4550	for (i = 0; i < nr; i++)
4551	  /* But don't do this if the reg actually serves as an output
4552	     reload reg in the current instruction.  */
4553	  if (n_reloads == 0
4554	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4555	    {
4556	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4557	      spill_reg_store[regno + i] = 0;
4558	    }
4559    }
4560
4561  if (regs)
4562    while (nr-- > 0)
4563      SET_REGNO_REG_SET (regs, regno + nr);
4564  else
4565    {
4566      /* Since value of X has changed,
4567	 forget any value previously copied from it.  */
4568
4569      while (nr-- > 0)
4570	/* But don't forget a copy if this is the output reload
4571	   that establishes the copy's validity.  */
4572	if (n_reloads == 0
4573	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4574	  reg_last_reload_reg[regno + nr] = 0;
4575     }
4576}
4577
4578/* Forget the reloads marked in regset by previous function.  */
4579static void
4580forget_marked_reloads (regset regs)
4581{
4582  unsigned int reg;
4583  reg_set_iterator rsi;
4584  EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4585    {
4586      if (reg < FIRST_PSEUDO_REGISTER
4587	  /* But don't do this if the reg actually serves as an output
4588	     reload reg in the current instruction.  */
4589	  && (n_reloads == 0
4590	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4591	  {
4592	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4593	    spill_reg_store[reg] = 0;
4594	  }
4595      if (n_reloads == 0
4596	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4597	reg_last_reload_reg[reg] = 0;
4598    }
4599}
4600
4601/* The following HARD_REG_SETs indicate when each hard register is
4602   used for a reload of various parts of the current insn.  */
4603
4604/* If reg is unavailable for all reloads.  */
4605static HARD_REG_SET reload_reg_unavailable;
4606/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4607static HARD_REG_SET reload_reg_used;
4608/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4609static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4610/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4611static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4612/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4613static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4614/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
4615static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4616/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
4617static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4618/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
4619static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4620/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
4621static HARD_REG_SET reload_reg_used_in_op_addr;
4622/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
4623static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4624/* If reg is in use for a RELOAD_FOR_INSN reload.  */
4625static HARD_REG_SET reload_reg_used_in_insn;
4626/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
4627static HARD_REG_SET reload_reg_used_in_other_addr;
4628
4629/* If reg is in use as a reload reg for any sort of reload.  */
4630static HARD_REG_SET reload_reg_used_at_all;
4631
4632/* If reg is use as an inherited reload.  We just mark the first register
4633   in the group.  */
4634static HARD_REG_SET reload_reg_used_for_inherit;
4635
4636/* Records which hard regs are used in any way, either as explicit use or
4637   by being allocated to a pseudo during any point of the current insn.  */
4638static HARD_REG_SET reg_used_in_insn;
4639
4640/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4641   TYPE. MODE is used to indicate how many consecutive regs are
4642   actually used.  */
4643
4644static void
4645mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4646			enum machine_mode mode)
4647{
4648  unsigned int nregs = hard_regno_nregs[regno][mode];
4649  unsigned int i;
4650
4651  for (i = regno; i < nregs + regno; i++)
4652    {
4653      switch (type)
4654	{
4655	case RELOAD_OTHER:
4656	  SET_HARD_REG_BIT (reload_reg_used, i);
4657	  break;
4658
4659	case RELOAD_FOR_INPUT_ADDRESS:
4660	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4661	  break;
4662
4663	case RELOAD_FOR_INPADDR_ADDRESS:
4664	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4665	  break;
4666
4667	case RELOAD_FOR_OUTPUT_ADDRESS:
4668	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4669	  break;
4670
4671	case RELOAD_FOR_OUTADDR_ADDRESS:
4672	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4673	  break;
4674
4675	case RELOAD_FOR_OPERAND_ADDRESS:
4676	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4677	  break;
4678
4679	case RELOAD_FOR_OPADDR_ADDR:
4680	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4681	  break;
4682
4683	case RELOAD_FOR_OTHER_ADDRESS:
4684	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4685	  break;
4686
4687	case RELOAD_FOR_INPUT:
4688	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4689	  break;
4690
4691	case RELOAD_FOR_OUTPUT:
4692	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4693	  break;
4694
4695	case RELOAD_FOR_INSN:
4696	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4697	  break;
4698	}
4699
4700      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4701    }
4702}
4703
4704/* Similarly, but show REGNO is no longer in use for a reload.  */
4705
4706static void
4707clear_reload_reg_in_use (unsigned int regno, int opnum,
4708			 enum reload_type type, enum machine_mode mode)
4709{
4710  unsigned int nregs = hard_regno_nregs[regno][mode];
4711  unsigned int start_regno, end_regno, r;
4712  int i;
4713  /* A complication is that for some reload types, inheritance might
4714     allow multiple reloads of the same types to share a reload register.
4715     We set check_opnum if we have to check only reloads with the same
4716     operand number, and check_any if we have to check all reloads.  */
4717  int check_opnum = 0;
4718  int check_any = 0;
4719  HARD_REG_SET *used_in_set;
4720
4721  switch (type)
4722    {
4723    case RELOAD_OTHER:
4724      used_in_set = &reload_reg_used;
4725      break;
4726
4727    case RELOAD_FOR_INPUT_ADDRESS:
4728      used_in_set = &reload_reg_used_in_input_addr[opnum];
4729      break;
4730
4731    case RELOAD_FOR_INPADDR_ADDRESS:
4732      check_opnum = 1;
4733      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4734      break;
4735
4736    case RELOAD_FOR_OUTPUT_ADDRESS:
4737      used_in_set = &reload_reg_used_in_output_addr[opnum];
4738      break;
4739
4740    case RELOAD_FOR_OUTADDR_ADDRESS:
4741      check_opnum = 1;
4742      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4743      break;
4744
4745    case RELOAD_FOR_OPERAND_ADDRESS:
4746      used_in_set = &reload_reg_used_in_op_addr;
4747      break;
4748
4749    case RELOAD_FOR_OPADDR_ADDR:
4750      check_any = 1;
4751      used_in_set = &reload_reg_used_in_op_addr_reload;
4752      break;
4753
4754    case RELOAD_FOR_OTHER_ADDRESS:
4755      used_in_set = &reload_reg_used_in_other_addr;
4756      check_any = 1;
4757      break;
4758
4759    case RELOAD_FOR_INPUT:
4760      used_in_set = &reload_reg_used_in_input[opnum];
4761      break;
4762
4763    case RELOAD_FOR_OUTPUT:
4764      used_in_set = &reload_reg_used_in_output[opnum];
4765      break;
4766
4767    case RELOAD_FOR_INSN:
4768      used_in_set = &reload_reg_used_in_insn;
4769      break;
4770    default:
4771      gcc_unreachable ();
4772    }
4773  /* We resolve conflicts with remaining reloads of the same type by
4774     excluding the intervals of reload registers by them from the
4775     interval of freed reload registers.  Since we only keep track of
4776     one set of interval bounds, we might have to exclude somewhat
4777     more than what would be necessary if we used a HARD_REG_SET here.
4778     But this should only happen very infrequently, so there should
4779     be no reason to worry about it.  */
4780
4781  start_regno = regno;
4782  end_regno = regno + nregs;
4783  if (check_opnum || check_any)
4784    {
4785      for (i = n_reloads - 1; i >= 0; i--)
4786	{
4787	  if (rld[i].when_needed == type
4788	      && (check_any || rld[i].opnum == opnum)
4789	      && rld[i].reg_rtx)
4790	    {
4791	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4792	      unsigned int conflict_end
4793		= end_hard_regno (rld[i].mode, conflict_start);
4794
4795	      /* If there is an overlap with the first to-be-freed register,
4796		 adjust the interval start.  */
4797	      if (conflict_start <= start_regno && conflict_end > start_regno)
4798		start_regno = conflict_end;
4799	      /* Otherwise, if there is a conflict with one of the other
4800		 to-be-freed registers, adjust the interval end.  */
4801	      if (conflict_start > start_regno && conflict_start < end_regno)
4802		end_regno = conflict_start;
4803	    }
4804	}
4805    }
4806
4807  for (r = start_regno; r < end_regno; r++)
4808    CLEAR_HARD_REG_BIT (*used_in_set, r);
4809}
4810
4811/* 1 if reg REGNO is free as a reload reg for a reload of the sort
4812   specified by OPNUM and TYPE.  */
4813
4814static int
4815reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
4816{
4817  int i;
4818
4819  /* In use for a RELOAD_OTHER means it's not available for anything.  */
4820  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4821      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4822    return 0;
4823
4824  switch (type)
4825    {
4826    case RELOAD_OTHER:
4827      /* In use for anything means we can't use it for RELOAD_OTHER.  */
4828      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4829	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4830	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4831	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4832	return 0;
4833
4834      for (i = 0; i < reload_n_operands; i++)
4835	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4836	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4837	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4838	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4839	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4840	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4841	  return 0;
4842
4843      return 1;
4844
4845    case RELOAD_FOR_INPUT:
4846      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4847	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4848	return 0;
4849
4850      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4851	return 0;
4852
4853      /* If it is used for some other input, can't use it.  */
4854      for (i = 0; i < reload_n_operands; i++)
4855	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4856	  return 0;
4857
4858      /* If it is used in a later operand's address, can't use it.  */
4859      for (i = opnum + 1; i < reload_n_operands; i++)
4860	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4861	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4862	  return 0;
4863
4864      return 1;
4865
4866    case RELOAD_FOR_INPUT_ADDRESS:
4867      /* Can't use a register if it is used for an input address for this
4868	 operand or used as an input in an earlier one.  */
4869      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4870	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4871	return 0;
4872
4873      for (i = 0; i < opnum; i++)
4874	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4875	  return 0;
4876
4877      return 1;
4878
4879    case RELOAD_FOR_INPADDR_ADDRESS:
4880      /* Can't use a register if it is used for an input address
4881	 for this operand or used as an input in an earlier
4882	 one.  */
4883      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4884	return 0;
4885
4886      for (i = 0; i < opnum; i++)
4887	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4888	  return 0;
4889
4890      return 1;
4891
4892    case RELOAD_FOR_OUTPUT_ADDRESS:
4893      /* Can't use a register if it is used for an output address for this
4894	 operand or used as an output in this or a later operand.  Note
4895	 that multiple output operands are emitted in reverse order, so
4896	 the conflicting ones are those with lower indices.  */
4897      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4898	return 0;
4899
4900      for (i = 0; i <= opnum; i++)
4901	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4902	  return 0;
4903
4904      return 1;
4905
4906    case RELOAD_FOR_OUTADDR_ADDRESS:
4907      /* Can't use a register if it is used for an output address
4908	 for this operand or used as an output in this or a
4909	 later operand.  Note that multiple output operands are
4910	 emitted in reverse order, so the conflicting ones are
4911	 those with lower indices.  */
4912      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4913	return 0;
4914
4915      for (i = 0; i <= opnum; i++)
4916	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4917	  return 0;
4918
4919      return 1;
4920
4921    case RELOAD_FOR_OPERAND_ADDRESS:
4922      for (i = 0; i < reload_n_operands; i++)
4923	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4924	  return 0;
4925
4926      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4927	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4928
4929    case RELOAD_FOR_OPADDR_ADDR:
4930      for (i = 0; i < reload_n_operands; i++)
4931	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4932	  return 0;
4933
4934      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4935
4936    case RELOAD_FOR_OUTPUT:
4937      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4938	 outputs, or an operand address for this or an earlier output.
4939	 Note that multiple output operands are emitted in reverse order,
4940	 so the conflicting ones are those with higher indices.  */
4941      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4942	return 0;
4943
4944      for (i = 0; i < reload_n_operands; i++)
4945	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4946	  return 0;
4947
4948      for (i = opnum; i < reload_n_operands; i++)
4949	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4950	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4951	  return 0;
4952
4953      return 1;
4954
4955    case RELOAD_FOR_INSN:
4956      for (i = 0; i < reload_n_operands; i++)
4957	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4958	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4959	  return 0;
4960
4961      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4962	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4963
4964    case RELOAD_FOR_OTHER_ADDRESS:
4965      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4966
4967    default:
4968      gcc_unreachable ();
4969    }
4970}
4971
4972/* Return 1 if the value in reload reg REGNO, as used by a reload
4973   needed for the part of the insn specified by OPNUM and TYPE,
4974   is still available in REGNO at the end of the insn.
4975
4976   We can assume that the reload reg was already tested for availability
4977   at the time it is needed, and we should not check this again,
4978   in case the reg has already been marked in use.  */
4979
4980static int
4981reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
4982{
4983  int i;
4984
4985  switch (type)
4986    {
4987    case RELOAD_OTHER:
4988      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4989	 its value must reach the end.  */
4990      return 1;
4991
4992      /* If this use is for part of the insn,
4993	 its value reaches if no subsequent part uses the same register.
4994	 Just like the above function, don't try to do this with lots
4995	 of fallthroughs.  */
4996
4997    case RELOAD_FOR_OTHER_ADDRESS:
4998      /* Here we check for everything else, since these don't conflict
4999	 with anything else and everything comes later.  */
5000
5001      for (i = 0; i < reload_n_operands; i++)
5002	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5003	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5004	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5005	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5006	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5007	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5008	  return 0;
5009
5010      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5011	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5012	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5013	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5014
5015    case RELOAD_FOR_INPUT_ADDRESS:
5016    case RELOAD_FOR_INPADDR_ADDRESS:
5017      /* Similar, except that we check only for this and subsequent inputs
5018	 and the address of only subsequent inputs and we do not need
5019	 to check for RELOAD_OTHER objects since they are known not to
5020	 conflict.  */
5021
5022      for (i = opnum; i < reload_n_operands; i++)
5023	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5024	  return 0;
5025
5026      for (i = opnum + 1; i < reload_n_operands; i++)
5027	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5028	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5029	  return 0;
5030
5031      for (i = 0; i < reload_n_operands; i++)
5032	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5033	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5034	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5035	  return 0;
5036
5037      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5038	return 0;
5039
5040      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5041	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5042	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5043
5044    case RELOAD_FOR_INPUT:
5045      /* Similar to input address, except we start at the next operand for
5046	 both input and input address and we do not check for
5047	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5048	 would conflict.  */
5049
5050      for (i = opnum + 1; i < reload_n_operands; i++)
5051	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5052	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5053	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5054	  return 0;
5055
5056      /* ... fall through ...  */
5057
5058    case RELOAD_FOR_OPERAND_ADDRESS:
5059      /* Check outputs and their addresses.  */
5060
5061      for (i = 0; i < reload_n_operands; i++)
5062	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5063	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5064	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5065	  return 0;
5066
5067      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5068
5069    case RELOAD_FOR_OPADDR_ADDR:
5070      for (i = 0; i < reload_n_operands; i++)
5071	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5072	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5073	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5074	  return 0;
5075
5076      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5077	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5078	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5079
5080    case RELOAD_FOR_INSN:
5081      /* These conflict with other outputs with RELOAD_OTHER.  So
5082	 we need only check for output addresses.  */
5083
5084      opnum = reload_n_operands;
5085
5086      /* ... fall through ...  */
5087
5088    case RELOAD_FOR_OUTPUT:
5089    case RELOAD_FOR_OUTPUT_ADDRESS:
5090    case RELOAD_FOR_OUTADDR_ADDRESS:
5091      /* We already know these can't conflict with a later output.  So the
5092	 only thing to check are later output addresses.
5093	 Note that multiple output operands are emitted in reverse order,
5094	 so the conflicting ones are those with lower indices.  */
5095      for (i = 0; i < opnum; i++)
5096	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5097	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5098	  return 0;
5099
5100      return 1;
5101
5102    default:
5103      gcc_unreachable ();
5104    }
5105}
5106
5107/* Like reload_reg_reaches_end_p, but check that the condition holds for
5108   every register in the range [REGNO, REGNO + NREGS).  */
5109
5110static bool
5111reload_regs_reach_end_p (unsigned int regno, int nregs,
5112			 int opnum, enum reload_type type)
5113{
5114  int i;
5115
5116  for (i = 0; i < nregs; i++)
5117    if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5118      return false;
5119  return true;
5120}
5121
5122
5123/*  Returns whether R1 and R2 are uniquely chained: the value of one
5124    is used by the other, and that value is not used by any other
5125    reload for this insn.  This is used to partially undo the decision
5126    made in find_reloads when in the case of multiple
5127    RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5128    RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5129    reloads.  This code tries to avoid the conflict created by that
5130    change.  It might be cleaner to explicitly keep track of which
5131    RELOAD_FOR_OPADDR_ADDR reload is associated with which
5132    RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5133    this after the fact. */
5134static bool
5135reloads_unique_chain_p (int r1, int r2)
5136{
5137  int i;
5138
5139  /* We only check input reloads.  */
5140  if (! rld[r1].in || ! rld[r2].in)
5141    return false;
5142
5143  /* Avoid anything with output reloads.  */
5144  if (rld[r1].out || rld[r2].out)
5145    return false;
5146
5147  /* "chained" means one reload is a component of the other reload,
5148     not the same as the other reload.  */
5149  if (rld[r1].opnum != rld[r2].opnum
5150      || rtx_equal_p (rld[r1].in, rld[r2].in)
5151      || rld[r1].optional || rld[r2].optional
5152      || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5153	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5154    return false;
5155
5156  for (i = 0; i < n_reloads; i ++)
5157    /* Look for input reloads that aren't our two */
5158    if (i != r1 && i != r2 && rld[i].in)
5159      {
5160	/* If our reload is mentioned at all, it isn't a simple chain.  */
5161	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5162	  return false;
5163      }
5164  return true;
5165}
5166
5167/* The recursive function change all occurrences of WHAT in *WHERE
5168   to REPL.  */
5169static void
5170substitute (rtx *where, const_rtx what, rtx repl)
5171{
5172  const char *fmt;
5173  int i;
5174  enum rtx_code code;
5175
5176  if (*where == 0)
5177    return;
5178
5179  if (*where == what || rtx_equal_p (*where, what))
5180    {
5181      /* Record the location of the changed rtx.  */
5182      VEC_safe_push (rtx_p, heap, substitute_stack, where);
5183      *where = repl;
5184      return;
5185    }
5186
5187  code = GET_CODE (*where);
5188  fmt = GET_RTX_FORMAT (code);
5189  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5190    {
5191      if (fmt[i] == 'E')
5192	{
5193	  int j;
5194
5195	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5196	    substitute (&XVECEXP (*where, i, j), what, repl);
5197	}
5198      else if (fmt[i] == 'e')
5199	substitute (&XEXP (*where, i), what, repl);
5200    }
5201}
5202
5203/* The function returns TRUE if chain of reload R1 and R2 (in any
5204   order) can be evaluated without usage of intermediate register for
5205   the reload containing another reload.  It is important to see
5206   gen_reload to understand what the function is trying to do.  As an
5207   example, let us have reload chain
5208
5209      r2: const
5210      r1: <something> + const
5211
5212   and reload R2 got reload reg HR.  The function returns true if
5213   there is a correct insn HR = HR + <something>.  Otherwise,
5214   gen_reload will use intermediate register (and this is the reload
5215   reg for R1) to reload <something>.
5216
5217   We need this function to find a conflict for chain reloads.  In our
5218   example, if HR = HR + <something> is incorrect insn, then we cannot
5219   use HR as a reload register for R2.  If we do use it then we get a
5220   wrong code:
5221
5222      HR = const
5223      HR = <something>
5224      HR = HR + HR
5225
5226*/
5227static bool
5228gen_reload_chain_without_interm_reg_p (int r1, int r2)
5229{
5230  /* Assume other cases in gen_reload are not possible for
5231     chain reloads or do need an intermediate hard registers.  */
5232  bool result = true;
5233  int regno, n, code;
5234  rtx out, in, tem, insn;
5235  rtx last = get_last_insn ();
5236
5237  /* Make r2 a component of r1.  */
5238  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5239    {
5240      n = r1;
5241      r1 = r2;
5242      r2 = n;
5243    }
5244  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5245  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5246  gcc_assert (regno >= 0);
5247  out = gen_rtx_REG (rld[r1].mode, regno);
5248  in = rld[r1].in;
5249  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5250
5251  /* If IN is a paradoxical SUBREG, remove it and try to put the
5252     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
5253  if (GET_CODE (in) == SUBREG
5254      && (GET_MODE_SIZE (GET_MODE (in))
5255	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
5256      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
5257    in = SUBREG_REG (in), out = tem;
5258
5259  if (GET_CODE (in) == PLUS
5260      && (REG_P (XEXP (in, 0))
5261	  || GET_CODE (XEXP (in, 0)) == SUBREG
5262	  || MEM_P (XEXP (in, 0)))
5263      && (REG_P (XEXP (in, 1))
5264	  || GET_CODE (XEXP (in, 1)) == SUBREG
5265	  || CONSTANT_P (XEXP (in, 1))
5266	  || MEM_P (XEXP (in, 1))))
5267    {
5268      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
5269      code = recog_memoized (insn);
5270      result = false;
5271
5272      if (code >= 0)
5273	{
5274	  extract_insn (insn);
5275	  /* We want constrain operands to treat this insn strictly in
5276	     its validity determination, i.e., the way it would after
5277	     reload has completed.  */
5278	  result = constrain_operands (1);
5279	}
5280
5281      delete_insns_since (last);
5282    }
5283
5284  /* Restore the original value at each changed address within R1.  */
5285  while (!VEC_empty (rtx_p, substitute_stack))
5286    {
5287      rtx *where = VEC_pop (rtx_p, substitute_stack);
5288      *where = rld[r2].in;
5289    }
5290
5291  return result;
5292}
5293
5294/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5295   Return 0 otherwise.
5296
5297   This function uses the same algorithm as reload_reg_free_p above.  */
5298
5299static int
5300reloads_conflict (int r1, int r2)
5301{
5302  enum reload_type r1_type = rld[r1].when_needed;
5303  enum reload_type r2_type = rld[r2].when_needed;
5304  int r1_opnum = rld[r1].opnum;
5305  int r2_opnum = rld[r2].opnum;
5306
5307  /* RELOAD_OTHER conflicts with everything.  */
5308  if (r2_type == RELOAD_OTHER)
5309    return 1;
5310
5311  /* Otherwise, check conflicts differently for each type.  */
5312
5313  switch (r1_type)
5314    {
5315    case RELOAD_FOR_INPUT:
5316      return (r2_type == RELOAD_FOR_INSN
5317	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5318	      || r2_type == RELOAD_FOR_OPADDR_ADDR
5319	      || r2_type == RELOAD_FOR_INPUT
5320	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5321		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5322		  && r2_opnum > r1_opnum));
5323
5324    case RELOAD_FOR_INPUT_ADDRESS:
5325      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5326	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5327
5328    case RELOAD_FOR_INPADDR_ADDRESS:
5329      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5330	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5331
5332    case RELOAD_FOR_OUTPUT_ADDRESS:
5333      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5334	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5335
5336    case RELOAD_FOR_OUTADDR_ADDRESS:
5337      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5338	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5339
5340    case RELOAD_FOR_OPERAND_ADDRESS:
5341      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5342	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5343		  && (!reloads_unique_chain_p (r1, r2)
5344		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5345
5346    case RELOAD_FOR_OPADDR_ADDR:
5347      return (r2_type == RELOAD_FOR_INPUT
5348	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
5349
5350    case RELOAD_FOR_OUTPUT:
5351      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5352	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5353		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5354		  && r2_opnum >= r1_opnum));
5355
5356    case RELOAD_FOR_INSN:
5357      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5358	      || r2_type == RELOAD_FOR_INSN
5359	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5360
5361    case RELOAD_FOR_OTHER_ADDRESS:
5362      return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5363
5364    case RELOAD_OTHER:
5365      return 1;
5366
5367    default:
5368      gcc_unreachable ();
5369    }
5370}
5371
5372/* Indexed by reload number, 1 if incoming value
5373   inherited from previous insns.  */
5374static char reload_inherited[MAX_RELOADS];
5375
5376/* For an inherited reload, this is the insn the reload was inherited from,
5377   if we know it.  Otherwise, this is 0.  */
5378static rtx reload_inheritance_insn[MAX_RELOADS];
5379
5380/* If nonzero, this is a place to get the value of the reload,
5381   rather than using reload_in.  */
5382static rtx reload_override_in[MAX_RELOADS];
5383
5384/* For each reload, the hard register number of the register used,
5385   or -1 if we did not need a register for this reload.  */
5386static int reload_spill_index[MAX_RELOADS];
5387
5388/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
5389static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5390
5391/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
5392static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5393
5394/* Subroutine of free_for_value_p, used to check a single register.
5395   START_REGNO is the starting regno of the full reload register
5396   (possibly comprising multiple hard registers) that we are considering.  */
5397
5398static int
5399reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5400			     enum reload_type type, rtx value, rtx out,
5401			     int reloadnum, int ignore_address_reloads)
5402{
5403  int time1;
5404  /* Set if we see an input reload that must not share its reload register
5405     with any new earlyclobber, but might otherwise share the reload
5406     register with an output or input-output reload.  */
5407  int check_earlyclobber = 0;
5408  int i;
5409  int copy = 0;
5410
5411  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5412    return 0;
5413
5414  if (out == const0_rtx)
5415    {
5416      copy = 1;
5417      out = NULL_RTX;
5418    }
5419
5420  /* We use some pseudo 'time' value to check if the lifetimes of the
5421     new register use would overlap with the one of a previous reload
5422     that is not read-only or uses a different value.
5423     The 'time' used doesn't have to be linear in any shape or form, just
5424     monotonic.
5425     Some reload types use different 'buckets' for each operand.
5426     So there are MAX_RECOG_OPERANDS different time values for each
5427     such reload type.
5428     We compute TIME1 as the time when the register for the prospective
5429     new reload ceases to be live, and TIME2 for each existing
5430     reload as the time when that the reload register of that reload
5431     becomes live.
5432     Where there is little to be gained by exact lifetime calculations,
5433     we just make conservative assumptions, i.e. a longer lifetime;
5434     this is done in the 'default:' cases.  */
5435  switch (type)
5436    {
5437    case RELOAD_FOR_OTHER_ADDRESS:
5438      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
5439      time1 = copy ? 0 : 1;
5440      break;
5441    case RELOAD_OTHER:
5442      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5443      break;
5444      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5445	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
5446	 respectively, to the time values for these, we get distinct time
5447	 values.  To get distinct time values for each operand, we have to
5448	 multiply opnum by at least three.  We round that up to four because
5449	 multiply by four is often cheaper.  */
5450    case RELOAD_FOR_INPADDR_ADDRESS:
5451      time1 = opnum * 4 + 2;
5452      break;
5453    case RELOAD_FOR_INPUT_ADDRESS:
5454      time1 = opnum * 4 + 3;
5455      break;
5456    case RELOAD_FOR_INPUT:
5457      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5458	 executes (inclusive).  */
5459      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5460      break;
5461    case RELOAD_FOR_OPADDR_ADDR:
5462      /* opnum * 4 + 4
5463	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5464      time1 = MAX_RECOG_OPERANDS * 4 + 1;
5465      break;
5466    case RELOAD_FOR_OPERAND_ADDRESS:
5467      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5468	 is executed.  */
5469      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5470      break;
5471    case RELOAD_FOR_OUTADDR_ADDRESS:
5472      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5473      break;
5474    case RELOAD_FOR_OUTPUT_ADDRESS:
5475      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5476      break;
5477    default:
5478      time1 = MAX_RECOG_OPERANDS * 5 + 5;
5479    }
5480
5481  for (i = 0; i < n_reloads; i++)
5482    {
5483      rtx reg = rld[i].reg_rtx;
5484      if (reg && REG_P (reg)
5485	  && ((unsigned) regno - true_regnum (reg)
5486	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5487	  && i != reloadnum)
5488	{
5489	  rtx other_input = rld[i].in;
5490
5491	  /* If the other reload loads the same input value, that
5492	     will not cause a conflict only if it's loading it into
5493	     the same register.  */
5494	  if (true_regnum (reg) != start_regno)
5495	    other_input = NULL_RTX;
5496	  if (! other_input || ! rtx_equal_p (other_input, value)
5497	      || rld[i].out || out)
5498	    {
5499	      int time2;
5500	      switch (rld[i].when_needed)
5501		{
5502		case RELOAD_FOR_OTHER_ADDRESS:
5503		  time2 = 0;
5504		  break;
5505		case RELOAD_FOR_INPADDR_ADDRESS:
5506		  /* find_reloads makes sure that a
5507		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5508		     by at most one - the first -
5509		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
5510		     address reload is inherited, the address address reload
5511		     goes away, so we can ignore this conflict.  */
5512		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5513		      && ignore_address_reloads
5514		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5515			 Then the address address is still needed to store
5516			 back the new address.  */
5517		      && ! rld[reloadnum].out)
5518		    continue;
5519		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5520		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5521		     reloads go away.  */
5522		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5523		      && ignore_address_reloads
5524		      /* Unless we are reloading an auto_inc expression.  */
5525		      && ! rld[reloadnum].out)
5526		    continue;
5527		  time2 = rld[i].opnum * 4 + 2;
5528		  break;
5529		case RELOAD_FOR_INPUT_ADDRESS:
5530		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5531		      && ignore_address_reloads
5532		      && ! rld[reloadnum].out)
5533		    continue;
5534		  time2 = rld[i].opnum * 4 + 3;
5535		  break;
5536		case RELOAD_FOR_INPUT:
5537		  time2 = rld[i].opnum * 4 + 4;
5538		  check_earlyclobber = 1;
5539		  break;
5540		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5541		     == MAX_RECOG_OPERAND * 4  */
5542		case RELOAD_FOR_OPADDR_ADDR:
5543		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5544		      && ignore_address_reloads
5545		      && ! rld[reloadnum].out)
5546		    continue;
5547		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5548		  break;
5549		case RELOAD_FOR_OPERAND_ADDRESS:
5550		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5551		  check_earlyclobber = 1;
5552		  break;
5553		case RELOAD_FOR_INSN:
5554		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5555		  break;
5556		case RELOAD_FOR_OUTPUT:
5557		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5558		     instruction is executed.  */
5559		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5560		  break;
5561		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5562		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5563		     value.  */
5564		case RELOAD_FOR_OUTADDR_ADDRESS:
5565		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5566		      && ignore_address_reloads
5567		      && ! rld[reloadnum].out)
5568		    continue;
5569		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5570		  break;
5571		case RELOAD_FOR_OUTPUT_ADDRESS:
5572		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5573		  break;
5574		case RELOAD_OTHER:
5575		  /* If there is no conflict in the input part, handle this
5576		     like an output reload.  */
5577		  if (! rld[i].in || rtx_equal_p (other_input, value))
5578		    {
5579		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
5580		      /* Earlyclobbered outputs must conflict with inputs.  */
5581		      if (earlyclobber_operand_p (rld[i].out))
5582			time2 = MAX_RECOG_OPERANDS * 4 + 3;
5583
5584		      break;
5585		    }
5586		  time2 = 1;
5587		  /* RELOAD_OTHER might be live beyond instruction execution,
5588		     but this is not obvious when we set time2 = 1.  So check
5589		     here if there might be a problem with the new reload
5590		     clobbering the register used by the RELOAD_OTHER.  */
5591		  if (out)
5592		    return 0;
5593		  break;
5594		default:
5595		  return 0;
5596		}
5597	      if ((time1 >= time2
5598		   && (! rld[i].in || rld[i].out
5599		       || ! rtx_equal_p (other_input, value)))
5600		  || (out && rld[reloadnum].out_reg
5601		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5602		return 0;
5603	    }
5604	}
5605    }
5606
5607  /* Earlyclobbered outputs must conflict with inputs.  */
5608  if (check_earlyclobber && out && earlyclobber_operand_p (out))
5609    return 0;
5610
5611  return 1;
5612}
5613
5614/* Return 1 if the value in reload reg REGNO, as used by a reload
5615   needed for the part of the insn specified by OPNUM and TYPE,
5616   may be used to load VALUE into it.
5617
5618   MODE is the mode in which the register is used, this is needed to
5619   determine how many hard regs to test.
5620
5621   Other read-only reloads with the same value do not conflict
5622   unless OUT is nonzero and these other reloads have to live while
5623   output reloads live.
5624   If OUT is CONST0_RTX, this is a special case: it means that the
5625   test should not be for using register REGNO as reload register, but
5626   for copying from register REGNO into the reload register.
5627
5628   RELOADNUM is the number of the reload we want to load this value for;
5629   a reload does not conflict with itself.
5630
5631   When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5632   reloads that load an address for the very reload we are considering.
5633
5634   The caller has to make sure that there is no conflict with the return
5635   register.  */
5636
5637static int
5638free_for_value_p (int regno, enum machine_mode mode, int opnum,
5639		  enum reload_type type, rtx value, rtx out, int reloadnum,
5640		  int ignore_address_reloads)
5641{
5642  int nregs = hard_regno_nregs[regno][mode];
5643  while (nregs-- > 0)
5644    if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5645				       value, out, reloadnum,
5646				       ignore_address_reloads))
5647      return 0;
5648  return 1;
5649}
5650
5651/* Return nonzero if the rtx X is invariant over the current function.  */
5652/* ??? Actually, the places where we use this expect exactly what is
5653   tested here, and not everything that is function invariant.  In
5654   particular, the frame pointer and arg pointer are special cased;
5655   pic_offset_table_rtx is not, and we must not spill these things to
5656   memory.  */
5657
5658int
5659function_invariant_p (const_rtx x)
5660{
5661  if (CONSTANT_P (x))
5662    return 1;
5663  if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5664    return 1;
5665  if (GET_CODE (x) == PLUS
5666      && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5667      && CONSTANT_P (XEXP (x, 1)))
5668    return 1;
5669  return 0;
5670}
5671
5672/* Determine whether the reload reg X overlaps any rtx'es used for
5673   overriding inheritance.  Return nonzero if so.  */
5674
5675static int
5676conflicts_with_override (rtx x)
5677{
5678  int i;
5679  for (i = 0; i < n_reloads; i++)
5680    if (reload_override_in[i]
5681	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
5682      return 1;
5683  return 0;
5684}
5685
5686/* Give an error message saying we failed to find a reload for INSN,
5687   and clear out reload R.  */
5688static void
5689failed_reload (rtx insn, int r)
5690{
5691  if (asm_noperands (PATTERN (insn)) < 0)
5692    /* It's the compiler's fault.  */
5693    fatal_insn ("could not find a spill register", insn);
5694
5695  /* It's the user's fault; the operand's mode and constraint
5696     don't match.  Disable this reload so we don't crash in final.  */
5697  error_for_asm (insn,
5698		 "%<asm%> operand constraint incompatible with operand size");
5699  rld[r].in = 0;
5700  rld[r].out = 0;
5701  rld[r].reg_rtx = 0;
5702  rld[r].optional = 1;
5703  rld[r].secondary_p = 1;
5704}
5705
5706/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5707   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
5708   successful.  */
5709static int
5710set_reload_reg (int i, int r)
5711{
5712  int regno;
5713  rtx reg = spill_reg_rtx[i];
5714
5715  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5716    spill_reg_rtx[i] = reg
5717      = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5718
5719  regno = true_regnum (reg);
5720
5721  /* Detect when the reload reg can't hold the reload mode.
5722     This used to be one `if', but Sequent compiler can't handle that.  */
5723  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5724    {
5725      enum machine_mode test_mode = VOIDmode;
5726      if (rld[r].in)
5727	test_mode = GET_MODE (rld[r].in);
5728      /* If rld[r].in has VOIDmode, it means we will load it
5729	 in whatever mode the reload reg has: to wit, rld[r].mode.
5730	 We have already tested that for validity.  */
5731      /* Aside from that, we need to test that the expressions
5732	 to reload from or into have modes which are valid for this
5733	 reload register.  Otherwise the reload insns would be invalid.  */
5734      if (! (rld[r].in != 0 && test_mode != VOIDmode
5735	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5736	if (! (rld[r].out != 0
5737	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5738	  {
5739	    /* The reg is OK.  */
5740	    last_spill_reg = i;
5741
5742	    /* Mark as in use for this insn the reload regs we use
5743	       for this.  */
5744	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5745				    rld[r].when_needed, rld[r].mode);
5746
5747	    rld[r].reg_rtx = reg;
5748	    reload_spill_index[r] = spill_regs[i];
5749	    return 1;
5750	  }
5751    }
5752  return 0;
5753}
5754
5755/* Find a spill register to use as a reload register for reload R.
5756   LAST_RELOAD is nonzero if this is the last reload for the insn being
5757   processed.
5758
5759   Set rld[R].reg_rtx to the register allocated.
5760
5761   We return 1 if successful, or 0 if we couldn't find a spill reg and
5762   we didn't change anything.  */
5763
5764static int
5765allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5766		     int last_reload)
5767{
5768  int i, pass, count;
5769
5770  /* If we put this reload ahead, thinking it is a group,
5771     then insist on finding a group.  Otherwise we can grab a
5772     reg that some other reload needs.
5773     (That can happen when we have a 68000 DATA_OR_FP_REG
5774     which is a group of data regs or one fp reg.)
5775     We need not be so restrictive if there are no more reloads
5776     for this insn.
5777
5778     ??? Really it would be nicer to have smarter handling
5779     for that kind of reg class, where a problem like this is normal.
5780     Perhaps those classes should be avoided for reloading
5781     by use of more alternatives.  */
5782
5783  int force_group = rld[r].nregs > 1 && ! last_reload;
5784
5785  /* If we want a single register and haven't yet found one,
5786     take any reg in the right class and not in use.
5787     If we want a consecutive group, here is where we look for it.
5788
5789     We use two passes so we can first look for reload regs to
5790     reuse, which are already in use for other reloads in this insn,
5791     and only then use additional registers.
5792     I think that maximizing reuse is needed to make sure we don't
5793     run out of reload regs.  Suppose we have three reloads, and
5794     reloads A and B can share regs.  These need two regs.
5795     Suppose A and B are given different regs.
5796     That leaves none for C.  */
5797  for (pass = 0; pass < 2; pass++)
5798    {
5799      /* I is the index in spill_regs.
5800	 We advance it round-robin between insns to use all spill regs
5801	 equally, so that inherited reloads have a chance
5802	 of leapfrogging each other.  */
5803
5804      i = last_spill_reg;
5805
5806      for (count = 0; count < n_spills; count++)
5807	{
5808	  int rclass = (int) rld[r].rclass;
5809	  int regnum;
5810
5811	  i++;
5812	  if (i >= n_spills)
5813	    i -= n_spills;
5814	  regnum = spill_regs[i];
5815
5816	  if ((reload_reg_free_p (regnum, rld[r].opnum,
5817				  rld[r].when_needed)
5818	       || (rld[r].in
5819		   /* We check reload_reg_used to make sure we
5820		      don't clobber the return register.  */
5821		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5822		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5823					rld[r].when_needed, rld[r].in,
5824					rld[r].out, r, 1)))
5825	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
5826	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5827	      /* Look first for regs to share, then for unshared.  But
5828		 don't share regs used for inherited reloads; they are
5829		 the ones we want to preserve.  */
5830	      && (pass
5831		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5832					 regnum)
5833		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5834					      regnum))))
5835	    {
5836	      int nr = hard_regno_nregs[regnum][rld[r].mode];
5837	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5838		 (on 68000) got us two FP regs.  If NR is 1,
5839		 we would reject both of them.  */
5840	      if (force_group)
5841		nr = rld[r].nregs;
5842	      /* If we need only one reg, we have already won.  */
5843	      if (nr == 1)
5844		{
5845		  /* But reject a single reg if we demand a group.  */
5846		  if (force_group)
5847		    continue;
5848		  break;
5849		}
5850	      /* Otherwise check that as many consecutive regs as we need
5851		 are available here.  */
5852	      while (nr > 1)
5853		{
5854		  int regno = regnum + nr - 1;
5855		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
5856			&& spill_reg_order[regno] >= 0
5857			&& reload_reg_free_p (regno, rld[r].opnum,
5858					      rld[r].when_needed)))
5859		    break;
5860		  nr--;
5861		}
5862	      if (nr == 1)
5863		break;
5864	    }
5865	}
5866
5867      /* If we found something on pass 1, omit pass 2.  */
5868      if (count < n_spills)
5869	break;
5870    }
5871
5872  /* We should have found a spill register by now.  */
5873  if (count >= n_spills)
5874    return 0;
5875
5876  /* I is the index in SPILL_REG_RTX of the reload register we are to
5877     allocate.  Get an rtx for it and find its register number.  */
5878
5879  return set_reload_reg (i, r);
5880}
5881
5882/* Initialize all the tables needed to allocate reload registers.
5883   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5884   is the array we use to restore the reg_rtx field for every reload.  */
5885
5886static void
5887choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
5888{
5889  int i;
5890
5891  for (i = 0; i < n_reloads; i++)
5892    rld[i].reg_rtx = save_reload_reg_rtx[i];
5893
5894  memset (reload_inherited, 0, MAX_RELOADS);
5895  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5896  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5897
5898  CLEAR_HARD_REG_SET (reload_reg_used);
5899  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5900  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5901  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5902  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5903  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5904
5905  CLEAR_HARD_REG_SET (reg_used_in_insn);
5906  {
5907    HARD_REG_SET tmp;
5908    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5909    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5910    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5911    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5912    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5913    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5914  }
5915
5916  for (i = 0; i < reload_n_operands; i++)
5917    {
5918      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5919      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5920      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5921      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5922      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5923      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5924    }
5925
5926  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5927
5928  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5929
5930  for (i = 0; i < n_reloads; i++)
5931    /* If we have already decided to use a certain register,
5932       don't use it in another way.  */
5933    if (rld[i].reg_rtx)
5934      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5935			      rld[i].when_needed, rld[i].mode);
5936}
5937
5938/* Assign hard reg targets for the pseudo-registers we must reload
5939   into hard regs for this insn.
5940   Also output the instructions to copy them in and out of the hard regs.
5941
5942   For machines with register classes, we are responsible for
5943   finding a reload reg in the proper class.  */
5944
5945static void
5946choose_reload_regs (struct insn_chain *chain)
5947{
5948  rtx insn = chain->insn;
5949  int i, j;
5950  unsigned int max_group_size = 1;
5951  enum reg_class group_class = NO_REGS;
5952  int pass, win, inheritance;
5953
5954  rtx save_reload_reg_rtx[MAX_RELOADS];
5955
5956  /* In order to be certain of getting the registers we need,
5957     we must sort the reloads into order of increasing register class.
5958     Then our grabbing of reload registers will parallel the process
5959     that provided the reload registers.
5960
5961     Also note whether any of the reloads wants a consecutive group of regs.
5962     If so, record the maximum size of the group desired and what
5963     register class contains all the groups needed by this insn.  */
5964
5965  for (j = 0; j < n_reloads; j++)
5966    {
5967      reload_order[j] = j;
5968      if (rld[j].reg_rtx != NULL_RTX)
5969	{
5970	  gcc_assert (REG_P (rld[j].reg_rtx)
5971		      && HARD_REGISTER_P (rld[j].reg_rtx));
5972	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
5973	}
5974      else
5975	reload_spill_index[j] = -1;
5976
5977      if (rld[j].nregs > 1)
5978	{
5979	  max_group_size = MAX (rld[j].nregs, max_group_size);
5980	  group_class
5981	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
5982	}
5983
5984      save_reload_reg_rtx[j] = rld[j].reg_rtx;
5985    }
5986
5987  if (n_reloads > 1)
5988    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5989
5990  /* If -O, try first with inheritance, then turning it off.
5991     If not -O, don't do inheritance.
5992     Using inheritance when not optimizing leads to paradoxes
5993     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5994     because one side of the comparison might be inherited.  */
5995  win = 0;
5996  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5997    {
5998      choose_reload_regs_init (chain, save_reload_reg_rtx);
5999
6000      /* Process the reloads in order of preference just found.
6001	 Beyond this point, subregs can be found in reload_reg_rtx.
6002
6003	 This used to look for an existing reloaded home for all of the
6004	 reloads, and only then perform any new reloads.  But that could lose
6005	 if the reloads were done out of reg-class order because a later
6006	 reload with a looser constraint might have an old home in a register
6007	 needed by an earlier reload with a tighter constraint.
6008
6009	 To solve this, we make two passes over the reloads, in the order
6010	 described above.  In the first pass we try to inherit a reload
6011	 from a previous insn.  If there is a later reload that needs a
6012	 class that is a proper subset of the class being processed, we must
6013	 also allocate a spill register during the first pass.
6014
6015	 Then make a second pass over the reloads to allocate any reloads
6016	 that haven't been given registers yet.  */
6017
6018      for (j = 0; j < n_reloads; j++)
6019	{
6020	  int r = reload_order[j];
6021	  rtx search_equiv = NULL_RTX;
6022
6023	  /* Ignore reloads that got marked inoperative.  */
6024	  if (rld[r].out == 0 && rld[r].in == 0
6025	      && ! rld[r].secondary_p)
6026	    continue;
6027
6028	  /* If find_reloads chose to use reload_in or reload_out as a reload
6029	     register, we don't need to chose one.  Otherwise, try even if it
6030	     found one since we might save an insn if we find the value lying
6031	     around.
6032	     Try also when reload_in is a pseudo without a hard reg.  */
6033	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6034	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6035		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6036		      && !MEM_P (rld[r].in)
6037		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6038	    continue;
6039
6040#if 0 /* No longer needed for correct operation.
6041	 It might give better code, or might not; worth an experiment?  */
6042	  /* If this is an optional reload, we can't inherit from earlier insns
6043	     until we are sure that any non-optional reloads have been allocated.
6044	     The following code takes advantage of the fact that optional reloads
6045	     are at the end of reload_order.  */
6046	  if (rld[r].optional != 0)
6047	    for (i = 0; i < j; i++)
6048	      if ((rld[reload_order[i]].out != 0
6049		   || rld[reload_order[i]].in != 0
6050		   || rld[reload_order[i]].secondary_p)
6051		  && ! rld[reload_order[i]].optional
6052		  && rld[reload_order[i]].reg_rtx == 0)
6053		allocate_reload_reg (chain, reload_order[i], 0);
6054#endif
6055
6056	  /* First see if this pseudo is already available as reloaded
6057	     for a previous insn.  We cannot try to inherit for reloads
6058	     that are smaller than the maximum number of registers needed
6059	     for groups unless the register we would allocate cannot be used
6060	     for the groups.
6061
6062	     We could check here to see if this is a secondary reload for
6063	     an object that is already in a register of the desired class.
6064	     This would avoid the need for the secondary reload register.
6065	     But this is complex because we can't easily determine what
6066	     objects might want to be loaded via this reload.  So let a
6067	     register be allocated here.  In `emit_reload_insns' we suppress
6068	     one of the loads in the case described above.  */
6069
6070	  if (inheritance)
6071	    {
6072	      int byte = 0;
6073	      int regno = -1;
6074	      enum machine_mode mode = VOIDmode;
6075
6076	      if (rld[r].in == 0)
6077		;
6078	      else if (REG_P (rld[r].in))
6079		{
6080		  regno = REGNO (rld[r].in);
6081		  mode = GET_MODE (rld[r].in);
6082		}
6083	      else if (REG_P (rld[r].in_reg))
6084		{
6085		  regno = REGNO (rld[r].in_reg);
6086		  mode = GET_MODE (rld[r].in_reg);
6087		}
6088	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6089		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6090		{
6091		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6092		  if (regno < FIRST_PSEUDO_REGISTER)
6093		    regno = subreg_regno (rld[r].in_reg);
6094		  else
6095		    byte = SUBREG_BYTE (rld[r].in_reg);
6096		  mode = GET_MODE (rld[r].in_reg);
6097		}
6098#ifdef AUTO_INC_DEC
6099	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6100		       && REG_P (XEXP (rld[r].in_reg, 0)))
6101		{
6102		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6103		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6104		  rld[r].out = rld[r].in;
6105		}
6106#endif
6107#if 0
6108	      /* This won't work, since REGNO can be a pseudo reg number.
6109		 Also, it takes much more hair to keep track of all the things
6110		 that can invalidate an inherited reload of part of a pseudoreg.  */
6111	      else if (GET_CODE (rld[r].in) == SUBREG
6112		       && REG_P (SUBREG_REG (rld[r].in)))
6113		regno = subreg_regno (rld[r].in);
6114#endif
6115
6116	      if (regno >= 0
6117		  && reg_last_reload_reg[regno] != 0
6118#ifdef CANNOT_CHANGE_MODE_CLASS
6119		  /* Verify that the register it's in can be used in
6120		     mode MODE.  */
6121		  && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6122						GET_MODE (reg_last_reload_reg[regno]),
6123						mode)
6124#endif
6125		  )
6126		{
6127		  enum reg_class rclass = rld[r].rclass, last_class;
6128		  rtx last_reg = reg_last_reload_reg[regno];
6129		  enum machine_mode need_mode;
6130
6131		  i = REGNO (last_reg);
6132		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6133		  last_class = REGNO_REG_CLASS (i);
6134
6135		  if (byte == 0)
6136		    need_mode = mode;
6137		  else
6138		    need_mode
6139		      = smallest_mode_for_size
6140		        (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
6141			 GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
6142			 ? MODE_INT : GET_MODE_CLASS (mode));
6143
6144		  if ((GET_MODE_SIZE (GET_MODE (last_reg))
6145		       >= GET_MODE_SIZE (need_mode))
6146		      && reg_reloaded_contents[i] == regno
6147		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6148		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
6149		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6150			  /* Even if we can't use this register as a reload
6151			     register, we might use it for reload_override_in,
6152			     if copying it to the desired class is cheap
6153			     enough.  */
6154			  || ((REGISTER_MOVE_COST (mode, last_class, rclass)
6155			       < MEMORY_MOVE_COST (mode, rclass, 1))
6156			      && (secondary_reload_class (1, rclass, mode,
6157							  last_reg)
6158				  == NO_REGS)
6159#ifdef SECONDARY_MEMORY_NEEDED
6160			      && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6161							    mode)
6162#endif
6163			      ))
6164
6165		      && (rld[r].nregs == max_group_size
6166			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6167						  i))
6168		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6169					   rld[r].when_needed, rld[r].in,
6170					   const0_rtx, r, 1))
6171		    {
6172		      /* If a group is needed, verify that all the subsequent
6173			 registers still have their values intact.  */
6174		      int nr = hard_regno_nregs[i][rld[r].mode];
6175		      int k;
6176
6177		      for (k = 1; k < nr; k++)
6178			if (reg_reloaded_contents[i + k] != regno
6179			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6180			  break;
6181
6182		      if (k == nr)
6183			{
6184			  int i1;
6185			  int bad_for_class;
6186
6187			  last_reg = (GET_MODE (last_reg) == mode
6188				      ? last_reg : gen_rtx_REG (mode, i));
6189
6190			  bad_for_class = 0;
6191			  for (k = 0; k < nr; k++)
6192			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6193								  i+k);
6194
6195			  /* We found a register that contains the
6196			     value we need.  If this register is the
6197			     same as an `earlyclobber' operand of the
6198			     current insn, just mark it as a place to
6199			     reload from since we can't use it as the
6200			     reload register itself.  */
6201
6202			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6203			    if (reg_overlap_mentioned_for_reload_p
6204				(reg_last_reload_reg[regno],
6205				 reload_earlyclobbers[i1]))
6206			      break;
6207
6208			  if (i1 != n_earlyclobbers
6209			      || ! (free_for_value_p (i, rld[r].mode,
6210						      rld[r].opnum,
6211						      rld[r].when_needed, rld[r].in,
6212						      rld[r].out, r, 1))
6213			      /* Don't use it if we'd clobber a pseudo reg.  */
6214			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6215				  && rld[r].out
6216				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6217			      /* Don't clobber the frame pointer.  */
6218			      || (i == HARD_FRAME_POINTER_REGNUM
6219				  && frame_pointer_needed
6220				  && rld[r].out)
6221			      /* Don't really use the inherited spill reg
6222				 if we need it wider than we've got it.  */
6223			      || (GET_MODE_SIZE (rld[r].mode)
6224				  > GET_MODE_SIZE (mode))
6225			      || bad_for_class
6226
6227			      /* If find_reloads chose reload_out as reload
6228				 register, stay with it - that leaves the
6229				 inherited register for subsequent reloads.  */
6230			      || (rld[r].out && rld[r].reg_rtx
6231				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6232			    {
6233			      if (! rld[r].optional)
6234				{
6235				  reload_override_in[r] = last_reg;
6236				  reload_inheritance_insn[r]
6237				    = reg_reloaded_insn[i];
6238				}
6239			    }
6240			  else
6241			    {
6242			      int k;
6243			      /* We can use this as a reload reg.  */
6244			      /* Mark the register as in use for this part of
6245				 the insn.  */
6246			      mark_reload_reg_in_use (i,
6247						      rld[r].opnum,
6248						      rld[r].when_needed,
6249						      rld[r].mode);
6250			      rld[r].reg_rtx = last_reg;
6251			      reload_inherited[r] = 1;
6252			      reload_inheritance_insn[r]
6253				= reg_reloaded_insn[i];
6254			      reload_spill_index[r] = i;
6255			      for (k = 0; k < nr; k++)
6256				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6257						  i + k);
6258			    }
6259			}
6260		    }
6261		}
6262	    }
6263
6264	  /* Here's another way to see if the value is already lying around.  */
6265	  if (inheritance
6266	      && rld[r].in != 0
6267	      && ! reload_inherited[r]
6268	      && rld[r].out == 0
6269	      && (CONSTANT_P (rld[r].in)
6270		  || GET_CODE (rld[r].in) == PLUS
6271		  || REG_P (rld[r].in)
6272		  || MEM_P (rld[r].in))
6273	      && (rld[r].nregs == max_group_size
6274		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6275	    search_equiv = rld[r].in;
6276
6277	  if (search_equiv)
6278	    {
6279	      rtx equiv
6280		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6281				  -1, NULL, 0, rld[r].mode);
6282	      int regno = 0;
6283
6284	      if (equiv != 0)
6285		{
6286		  if (REG_P (equiv))
6287		    regno = REGNO (equiv);
6288		  else
6289		    {
6290		      /* This must be a SUBREG of a hard register.
6291			 Make a new REG since this might be used in an
6292			 address and not all machines support SUBREGs
6293			 there.  */
6294		      gcc_assert (GET_CODE (equiv) == SUBREG);
6295		      regno = subreg_regno (equiv);
6296		      equiv = gen_rtx_REG (rld[r].mode, regno);
6297		      /* If we choose EQUIV as the reload register, but the
6298			 loop below decides to cancel the inheritance, we'll
6299			 end up reloading EQUIV in rld[r].mode, not the mode
6300			 it had originally.  That isn't safe when EQUIV isn't
6301			 available as a spill register since its value might
6302			 still be live at this point.  */
6303		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6304			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6305			  equiv = 0;
6306		    }
6307		}
6308
6309	      /* If we found a spill reg, reject it unless it is free
6310		 and of the desired class.  */
6311	      if (equiv != 0)
6312		{
6313		  int regs_used = 0;
6314		  int bad_for_class = 0;
6315		  int max_regno = regno + rld[r].nregs;
6316
6317		  for (i = regno; i < max_regno; i++)
6318		    {
6319		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6320						      i);
6321		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6322							   i);
6323		    }
6324
6325		  if ((regs_used
6326		       && ! free_for_value_p (regno, rld[r].mode,
6327					      rld[r].opnum, rld[r].when_needed,
6328					      rld[r].in, rld[r].out, r, 1))
6329		      || bad_for_class)
6330		    equiv = 0;
6331		}
6332
6333	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6334		equiv = 0;
6335
6336	      /* We found a register that contains the value we need.
6337		 If this register is the same as an `earlyclobber' operand
6338		 of the current insn, just mark it as a place to reload from
6339		 since we can't use it as the reload register itself.  */
6340
6341	      if (equiv != 0)
6342		for (i = 0; i < n_earlyclobbers; i++)
6343		  if (reg_overlap_mentioned_for_reload_p (equiv,
6344							  reload_earlyclobbers[i]))
6345		    {
6346		      if (! rld[r].optional)
6347			reload_override_in[r] = equiv;
6348		      equiv = 0;
6349		      break;
6350		    }
6351
6352	      /* If the equiv register we have found is explicitly clobbered
6353		 in the current insn, it depends on the reload type if we
6354		 can use it, use it for reload_override_in, or not at all.
6355		 In particular, we then can't use EQUIV for a
6356		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6357
6358	      if (equiv != 0)
6359		{
6360		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6361		    switch (rld[r].when_needed)
6362		      {
6363		      case RELOAD_FOR_OTHER_ADDRESS:
6364		      case RELOAD_FOR_INPADDR_ADDRESS:
6365		      case RELOAD_FOR_INPUT_ADDRESS:
6366		      case RELOAD_FOR_OPADDR_ADDR:
6367			break;
6368		      case RELOAD_OTHER:
6369		      case RELOAD_FOR_INPUT:
6370		      case RELOAD_FOR_OPERAND_ADDRESS:
6371			if (! rld[r].optional)
6372			  reload_override_in[r] = equiv;
6373			/* Fall through.  */
6374		      default:
6375			equiv = 0;
6376			break;
6377		      }
6378		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6379		    switch (rld[r].when_needed)
6380		      {
6381		      case RELOAD_FOR_OTHER_ADDRESS:
6382		      case RELOAD_FOR_INPADDR_ADDRESS:
6383		      case RELOAD_FOR_INPUT_ADDRESS:
6384		      case RELOAD_FOR_OPADDR_ADDR:
6385		      case RELOAD_FOR_OPERAND_ADDRESS:
6386		      case RELOAD_FOR_INPUT:
6387			break;
6388		      case RELOAD_OTHER:
6389			if (! rld[r].optional)
6390			  reload_override_in[r] = equiv;
6391			/* Fall through.  */
6392		      default:
6393			equiv = 0;
6394			break;
6395		      }
6396		}
6397
6398	      /* If we found an equivalent reg, say no code need be generated
6399		 to load it, and use it as our reload reg.  */
6400	      if (equiv != 0
6401		  && (regno != HARD_FRAME_POINTER_REGNUM
6402		      || !frame_pointer_needed))
6403		{
6404		  int nr = hard_regno_nregs[regno][rld[r].mode];
6405		  int k;
6406		  rld[r].reg_rtx = equiv;
6407		  reload_spill_index[r] = regno;
6408		  reload_inherited[r] = 1;
6409
6410		  /* If reg_reloaded_valid is not set for this register,
6411		     there might be a stale spill_reg_store lying around.
6412		     We must clear it, since otherwise emit_reload_insns
6413		     might delete the store.  */
6414		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6415		    spill_reg_store[regno] = NULL_RTX;
6416		  /* If any of the hard registers in EQUIV are spill
6417		     registers, mark them as in use for this insn.  */
6418		  for (k = 0; k < nr; k++)
6419		    {
6420		      i = spill_reg_order[regno + k];
6421		      if (i >= 0)
6422			{
6423			  mark_reload_reg_in_use (regno, rld[r].opnum,
6424						  rld[r].when_needed,
6425						  rld[r].mode);
6426			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6427					    regno + k);
6428			}
6429		    }
6430		}
6431	    }
6432
6433	  /* If we found a register to use already, or if this is an optional
6434	     reload, we are done.  */
6435	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6436	    continue;
6437
6438#if 0
6439	  /* No longer needed for correct operation.  Might or might
6440	     not give better code on the average.  Want to experiment?  */
6441
6442	  /* See if there is a later reload that has a class different from our
6443	     class that intersects our class or that requires less register
6444	     than our reload.  If so, we must allocate a register to this
6445	     reload now, since that reload might inherit a previous reload
6446	     and take the only available register in our class.  Don't do this
6447	     for optional reloads since they will force all previous reloads
6448	     to be allocated.  Also don't do this for reloads that have been
6449	     turned off.  */
6450
6451	  for (i = j + 1; i < n_reloads; i++)
6452	    {
6453	      int s = reload_order[i];
6454
6455	      if ((rld[s].in == 0 && rld[s].out == 0
6456		   && ! rld[s].secondary_p)
6457		  || rld[s].optional)
6458		continue;
6459
6460	      if ((rld[s].rclass != rld[r].rclass
6461		   && reg_classes_intersect_p (rld[r].rclass,
6462					       rld[s].rclass))
6463		  || rld[s].nregs < rld[r].nregs)
6464		break;
6465	    }
6466
6467	  if (i == n_reloads)
6468	    continue;
6469
6470	  allocate_reload_reg (chain, r, j == n_reloads - 1);
6471#endif
6472	}
6473
6474      /* Now allocate reload registers for anything non-optional that
6475	 didn't get one yet.  */
6476      for (j = 0; j < n_reloads; j++)
6477	{
6478	  int r = reload_order[j];
6479
6480	  /* Ignore reloads that got marked inoperative.  */
6481	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6482	    continue;
6483
6484	  /* Skip reloads that already have a register allocated or are
6485	     optional.  */
6486	  if (rld[r].reg_rtx != 0 || rld[r].optional)
6487	    continue;
6488
6489	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6490	    break;
6491	}
6492
6493      /* If that loop got all the way, we have won.  */
6494      if (j == n_reloads)
6495	{
6496	  win = 1;
6497	  break;
6498	}
6499
6500      /* Loop around and try without any inheritance.  */
6501    }
6502
6503  if (! win)
6504    {
6505      /* First undo everything done by the failed attempt
6506	 to allocate with inheritance.  */
6507      choose_reload_regs_init (chain, save_reload_reg_rtx);
6508
6509      /* Some sanity tests to verify that the reloads found in the first
6510	 pass are identical to the ones we have now.  */
6511      gcc_assert (chain->n_reloads == n_reloads);
6512
6513      for (i = 0; i < n_reloads; i++)
6514	{
6515	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6516	    continue;
6517	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6518	  for (j = 0; j < n_spills; j++)
6519	    if (spill_regs[j] == chain->rld[i].regno)
6520	      if (! set_reload_reg (j, i))
6521		failed_reload (chain->insn, i);
6522	}
6523    }
6524
6525  /* If we thought we could inherit a reload, because it seemed that
6526     nothing else wanted the same reload register earlier in the insn,
6527     verify that assumption, now that all reloads have been assigned.
6528     Likewise for reloads where reload_override_in has been set.  */
6529
6530  /* If doing expensive optimizations, do one preliminary pass that doesn't
6531     cancel any inheritance, but removes reloads that have been needed only
6532     for reloads that we know can be inherited.  */
6533  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6534    {
6535      for (j = 0; j < n_reloads; j++)
6536	{
6537	  int r = reload_order[j];
6538	  rtx check_reg;
6539	  if (reload_inherited[r] && rld[r].reg_rtx)
6540	    check_reg = rld[r].reg_rtx;
6541	  else if (reload_override_in[r]
6542		   && (REG_P (reload_override_in[r])
6543		       || GET_CODE (reload_override_in[r]) == SUBREG))
6544	    check_reg = reload_override_in[r];
6545	  else
6546	    continue;
6547	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6548				  rld[r].opnum, rld[r].when_needed, rld[r].in,
6549				  (reload_inherited[r]
6550				   ? rld[r].out : const0_rtx),
6551				  r, 1))
6552	    {
6553	      if (pass)
6554		continue;
6555	      reload_inherited[r] = 0;
6556	      reload_override_in[r] = 0;
6557	    }
6558	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6559	     reload_override_in, then we do not need its related
6560	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6561	     likewise for other reload types.
6562	     We handle this by removing a reload when its only replacement
6563	     is mentioned in reload_in of the reload we are going to inherit.
6564	     A special case are auto_inc expressions; even if the input is
6565	     inherited, we still need the address for the output.  We can
6566	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
6567	     If we succeeded removing some reload and we are doing a preliminary
6568	     pass just to remove such reloads, make another pass, since the
6569	     removal of one reload might allow us to inherit another one.  */
6570	  else if (rld[r].in
6571		   && rld[r].out != rld[r].in
6572		   && remove_address_replacements (rld[r].in) && pass)
6573	    pass = 2;
6574	}
6575    }
6576
6577  /* Now that reload_override_in is known valid,
6578     actually override reload_in.  */
6579  for (j = 0; j < n_reloads; j++)
6580    if (reload_override_in[j])
6581      rld[j].in = reload_override_in[j];
6582
6583  /* If this reload won't be done because it has been canceled or is
6584     optional and not inherited, clear reload_reg_rtx so other
6585     routines (such as subst_reloads) don't get confused.  */
6586  for (j = 0; j < n_reloads; j++)
6587    if (rld[j].reg_rtx != 0
6588	&& ((rld[j].optional && ! reload_inherited[j])
6589	    || (rld[j].in == 0 && rld[j].out == 0
6590		&& ! rld[j].secondary_p)))
6591      {
6592	int regno = true_regnum (rld[j].reg_rtx);
6593
6594	if (spill_reg_order[regno] >= 0)
6595	  clear_reload_reg_in_use (regno, rld[j].opnum,
6596				   rld[j].when_needed, rld[j].mode);
6597	rld[j].reg_rtx = 0;
6598	reload_spill_index[j] = -1;
6599      }
6600
6601  /* Record which pseudos and which spill regs have output reloads.  */
6602  for (j = 0; j < n_reloads; j++)
6603    {
6604      int r = reload_order[j];
6605
6606      i = reload_spill_index[r];
6607
6608      /* I is nonneg if this reload uses a register.
6609	 If rld[r].reg_rtx is 0, this is an optional reload
6610	 that we opted to ignore.  */
6611      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6612	  && rld[r].reg_rtx != 0)
6613	{
6614	  int nregno = REGNO (rld[r].out_reg);
6615	  int nr = 1;
6616
6617	  if (nregno < FIRST_PSEUDO_REGISTER)
6618	    nr = hard_regno_nregs[nregno][rld[r].mode];
6619
6620	  while (--nr >= 0)
6621	    SET_REGNO_REG_SET (&reg_has_output_reload,
6622			       nregno + nr);
6623
6624	  if (i >= 0)
6625	    {
6626	      nr = hard_regno_nregs[i][rld[r].mode];
6627	      while (--nr >= 0)
6628		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6629	    }
6630
6631	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
6632		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
6633		      || rld[r].when_needed == RELOAD_FOR_INSN);
6634	}
6635    }
6636}
6637
6638/* Deallocate the reload register for reload R.  This is called from
6639   remove_address_replacements.  */
6640
6641void
6642deallocate_reload_reg (int r)
6643{
6644  int regno;
6645
6646  if (! rld[r].reg_rtx)
6647    return;
6648  regno = true_regnum (rld[r].reg_rtx);
6649  rld[r].reg_rtx = 0;
6650  if (spill_reg_order[regno] >= 0)
6651    clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6652			     rld[r].mode);
6653  reload_spill_index[r] = -1;
6654}
6655
6656/* These arrays are filled by emit_reload_insns and its subroutines.  */
6657static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6658static rtx other_input_address_reload_insns = 0;
6659static rtx other_input_reload_insns = 0;
6660static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6661static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6662static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6663static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6664static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6665static rtx operand_reload_insns = 0;
6666static rtx other_operand_reload_insns = 0;
6667static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6668
6669/* Values to be put in spill_reg_store are put here first.  */
6670static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6671static HARD_REG_SET reg_reloaded_died;
6672
6673/* Check if *RELOAD_REG is suitable as an intermediate or scratch register
6674   of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
6675   is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
6676   adjusted register, and return true.  Otherwise, return false.  */
6677static bool
6678reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
6679			    enum reg_class new_class,
6680			    enum machine_mode new_mode)
6681
6682{
6683  rtx reg;
6684
6685  for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
6686    {
6687      unsigned regno = REGNO (reg);
6688
6689      if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
6690	continue;
6691      if (GET_MODE (reg) != new_mode)
6692	{
6693	  if (!HARD_REGNO_MODE_OK (regno, new_mode))
6694	    continue;
6695	  if (hard_regno_nregs[regno][new_mode]
6696	      > hard_regno_nregs[regno][GET_MODE (reg)])
6697	    continue;
6698	  reg = reload_adjust_reg_for_mode (reg, new_mode);
6699	}
6700      *reload_reg = reg;
6701      return true;
6702    }
6703  return false;
6704}
6705
6706/* Check if *RELOAD_REG is suitable as a scratch register for the reload
6707   pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
6708   nonzero, if that is suitable.  On success, change *RELOAD_REG to the
6709   adjusted register, and return true.  Otherwise, return false.  */
6710static bool
6711reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
6712			     enum insn_code icode)
6713
6714{
6715  enum reg_class new_class = scratch_reload_class (icode);
6716  enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
6717
6718  return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
6719				     new_class, new_mode);
6720}
6721
6722/* Generate insns to perform reload RL, which is for the insn in CHAIN and
6723   has the number J.  OLD contains the value to be used as input.  */
6724
6725static void
6726emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6727			 rtx old, int j)
6728{
6729  rtx insn = chain->insn;
6730  rtx reloadreg;
6731  rtx oldequiv_reg = 0;
6732  rtx oldequiv = 0;
6733  int special = 0;
6734  enum machine_mode mode;
6735  rtx *where;
6736
6737  /* delete_output_reload is only invoked properly if old contains
6738     the original pseudo register.  Since this is replaced with a
6739     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6740     find the pseudo in RELOAD_IN_REG.  */
6741  if (reload_override_in[j]
6742      && REG_P (rl->in_reg))
6743    {
6744      oldequiv = old;
6745      old = rl->in_reg;
6746    }
6747  if (oldequiv == 0)
6748    oldequiv = old;
6749  else if (REG_P (oldequiv))
6750    oldequiv_reg = oldequiv;
6751  else if (GET_CODE (oldequiv) == SUBREG)
6752    oldequiv_reg = SUBREG_REG (oldequiv);
6753
6754  reloadreg = reload_reg_rtx_for_input[j];
6755  mode = GET_MODE (reloadreg);
6756
6757  /* If we are reloading from a register that was recently stored in
6758     with an output-reload, see if we can prove there was
6759     actually no need to store the old value in it.  */
6760
6761  if (optimize && REG_P (oldequiv)
6762      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6763      && spill_reg_store[REGNO (oldequiv)]
6764      && REG_P (old)
6765      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6766	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6767			  rl->out_reg)))
6768    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
6769
6770  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
6771     OLDEQUIV.  */
6772
6773  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6774    oldequiv = SUBREG_REG (oldequiv);
6775  if (GET_MODE (oldequiv) != VOIDmode
6776      && mode != GET_MODE (oldequiv))
6777    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6778
6779  /* Switch to the right place to emit the reload insns.  */
6780  switch (rl->when_needed)
6781    {
6782    case RELOAD_OTHER:
6783      where = &other_input_reload_insns;
6784      break;
6785    case RELOAD_FOR_INPUT:
6786      where = &input_reload_insns[rl->opnum];
6787      break;
6788    case RELOAD_FOR_INPUT_ADDRESS:
6789      where = &input_address_reload_insns[rl->opnum];
6790      break;
6791    case RELOAD_FOR_INPADDR_ADDRESS:
6792      where = &inpaddr_address_reload_insns[rl->opnum];
6793      break;
6794    case RELOAD_FOR_OUTPUT_ADDRESS:
6795      where = &output_address_reload_insns[rl->opnum];
6796      break;
6797    case RELOAD_FOR_OUTADDR_ADDRESS:
6798      where = &outaddr_address_reload_insns[rl->opnum];
6799      break;
6800    case RELOAD_FOR_OPERAND_ADDRESS:
6801      where = &operand_reload_insns;
6802      break;
6803    case RELOAD_FOR_OPADDR_ADDR:
6804      where = &other_operand_reload_insns;
6805      break;
6806    case RELOAD_FOR_OTHER_ADDRESS:
6807      where = &other_input_address_reload_insns;
6808      break;
6809    default:
6810      gcc_unreachable ();
6811    }
6812
6813  push_to_sequence (*where);
6814
6815  /* Auto-increment addresses must be reloaded in a special way.  */
6816  if (rl->out && ! rl->out_reg)
6817    {
6818      /* We are not going to bother supporting the case where a
6819	 incremented register can't be copied directly from
6820	 OLDEQUIV since this seems highly unlikely.  */
6821      gcc_assert (rl->secondary_in_reload < 0);
6822
6823      if (reload_inherited[j])
6824	oldequiv = reloadreg;
6825
6826      old = XEXP (rl->in_reg, 0);
6827
6828      /* Prevent normal processing of this reload.  */
6829      special = 1;
6830      /* Output a special code sequence for this case, and forget about
6831	 spill reg information.  */
6832      new_spill_reg_store[REGNO (reloadreg)] = NULL;
6833      inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
6834    }
6835
6836  /* If we are reloading a pseudo-register that was set by the previous
6837     insn, see if we can get rid of that pseudo-register entirely
6838     by redirecting the previous insn into our reload register.  */
6839
6840  else if (optimize && REG_P (old)
6841	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
6842	   && dead_or_set_p (insn, old)
6843	   /* This is unsafe if some other reload
6844	      uses the same reg first.  */
6845	   && ! conflicts_with_override (reloadreg)
6846	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6847				rl->when_needed, old, rl->out, j, 0))
6848    {
6849      rtx temp = PREV_INSN (insn);
6850      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
6851	temp = PREV_INSN (temp);
6852      if (temp
6853	  && NONJUMP_INSN_P (temp)
6854	  && GET_CODE (PATTERN (temp)) == SET
6855	  && SET_DEST (PATTERN (temp)) == old
6856	  /* Make sure we can access insn_operand_constraint.  */
6857	  && asm_noperands (PATTERN (temp)) < 0
6858	  /* This is unsafe if operand occurs more than once in current
6859	     insn.  Perhaps some occurrences aren't reloaded.  */
6860	  && count_occurrences (PATTERN (insn), old, 0) == 1)
6861	{
6862	  rtx old = SET_DEST (PATTERN (temp));
6863	  /* Store into the reload register instead of the pseudo.  */
6864	  SET_DEST (PATTERN (temp)) = reloadreg;
6865
6866	  /* Verify that resulting insn is valid.  */
6867	  extract_insn (temp);
6868	  if (constrain_operands (1))
6869	    {
6870	      /* If the previous insn is an output reload, the source is
6871		 a reload register, and its spill_reg_store entry will
6872		 contain the previous destination.  This is now
6873		 invalid.  */
6874	      if (REG_P (SET_SRC (PATTERN (temp)))
6875		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6876		{
6877		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6878		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6879		}
6880
6881	      /* If these are the only uses of the pseudo reg,
6882		 pretend for GDB it lives in the reload reg we used.  */
6883	      if (REG_N_DEATHS (REGNO (old)) == 1
6884		  && REG_N_SETS (REGNO (old)) == 1)
6885		{
6886		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
6887		  if (ira_conflicts_p)
6888		    /* Inform IRA about the change.  */
6889		    ira_mark_allocation_change (REGNO (old));
6890		  alter_reg (REGNO (old), -1, false);
6891		}
6892	      special = 1;
6893
6894	      /* Adjust any debug insns between temp and insn.  */
6895	      while ((temp = NEXT_INSN (temp)) != insn)
6896		if (DEBUG_INSN_P (temp))
6897		  replace_rtx (PATTERN (temp), old, reloadreg);
6898		else
6899		  gcc_assert (NOTE_P (temp));
6900	    }
6901	  else
6902	    {
6903	      SET_DEST (PATTERN (temp)) = old;
6904	    }
6905	}
6906    }
6907
6908  /* We can't do that, so output an insn to load RELOADREG.  */
6909
6910  /* If we have a secondary reload, pick up the secondary register
6911     and icode, if any.  If OLDEQUIV and OLD are different or
6912     if this is an in-out reload, recompute whether or not we
6913     still need a secondary register and what the icode should
6914     be.  If we still need a secondary register and the class or
6915     icode is different, go back to reloading from OLD if using
6916     OLDEQUIV means that we got the wrong type of register.  We
6917     cannot have different class or icode due to an in-out reload
6918     because we don't make such reloads when both the input and
6919     output need secondary reload registers.  */
6920
6921  if (! special && rl->secondary_in_reload >= 0)
6922    {
6923      rtx second_reload_reg = 0;
6924      rtx third_reload_reg = 0;
6925      int secondary_reload = rl->secondary_in_reload;
6926      rtx real_oldequiv = oldequiv;
6927      rtx real_old = old;
6928      rtx tmp;
6929      enum insn_code icode;
6930      enum insn_code tertiary_icode = CODE_FOR_nothing;
6931
6932      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6933	 and similarly for OLD.
6934	 See comments in get_secondary_reload in reload.c.  */
6935      /* If it is a pseudo that cannot be replaced with its
6936	 equivalent MEM, we must fall back to reload_in, which
6937	 will have all the necessary substitutions registered.
6938	 Likewise for a pseudo that can't be replaced with its
6939	 equivalent constant.
6940
6941	 Take extra care for subregs of such pseudos.  Note that
6942	 we cannot use reg_equiv_mem in this case because it is
6943	 not in the right mode.  */
6944
6945      tmp = oldequiv;
6946      if (GET_CODE (tmp) == SUBREG)
6947	tmp = SUBREG_REG (tmp);
6948      if (REG_P (tmp)
6949	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6950	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6951	      || reg_equiv_constant[REGNO (tmp)] != 0))
6952	{
6953	  if (! reg_equiv_mem[REGNO (tmp)]
6954	      || num_not_at_initial_offset
6955	      || GET_CODE (oldequiv) == SUBREG)
6956	    real_oldequiv = rl->in;
6957	  else
6958	    real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6959	}
6960
6961      tmp = old;
6962      if (GET_CODE (tmp) == SUBREG)
6963	tmp = SUBREG_REG (tmp);
6964      if (REG_P (tmp)
6965	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6966	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6967	      || reg_equiv_constant[REGNO (tmp)] != 0))
6968	{
6969	  if (! reg_equiv_mem[REGNO (tmp)]
6970	      || num_not_at_initial_offset
6971	      || GET_CODE (old) == SUBREG)
6972	    real_old = rl->in;
6973	  else
6974	    real_old = reg_equiv_mem[REGNO (tmp)];
6975	}
6976
6977      second_reload_reg = rld[secondary_reload].reg_rtx;
6978      if (rld[secondary_reload].secondary_in_reload >= 0)
6979	{
6980	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;
6981
6982	  third_reload_reg = rld[tertiary_reload].reg_rtx;
6983	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
6984	  /* We'd have to add more code for quartary reloads.  */
6985	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
6986	}
6987      icode = rl->secondary_in_icode;
6988
6989      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6990	  || (rl->in != 0 && rl->out != 0))
6991	{
6992	  secondary_reload_info sri, sri2;
6993	  enum reg_class new_class, new_t_class;
6994
6995	  sri.icode = CODE_FOR_nothing;
6996	  sri.prev_sri = NULL;
6997	  new_class = targetm.secondary_reload (1, real_oldequiv, rl->rclass,
6998						mode, &sri);
6999
7000	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7001	    second_reload_reg = 0;
7002	  else if (new_class == NO_REGS)
7003	    {
7004	      if (reload_adjust_reg_for_icode (&second_reload_reg,
7005					       third_reload_reg,
7006					       (enum insn_code) sri.icode))
7007		{
7008		  icode = (enum insn_code) sri.icode;
7009		  third_reload_reg = 0;
7010		}
7011	      else
7012		{
7013		  oldequiv = old;
7014		  real_oldequiv = real_old;
7015		}
7016	    }
7017	  else if (sri.icode != CODE_FOR_nothing)
7018	    /* We currently lack a way to express this in reloads.  */
7019	    gcc_unreachable ();
7020	  else
7021	    {
7022	      sri2.icode = CODE_FOR_nothing;
7023	      sri2.prev_sri = &sri;
7024	      new_t_class = targetm.secondary_reload (1, real_oldequiv,
7025						      new_class, mode, &sri);
7026	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7027		{
7028		  if (reload_adjust_reg_for_temp (&second_reload_reg,
7029						  third_reload_reg,
7030						  new_class, mode))
7031		    {
7032		      third_reload_reg = 0;
7033		      tertiary_icode = (enum insn_code) sri2.icode;
7034		    }
7035		  else
7036		    {
7037		      oldequiv = old;
7038		      real_oldequiv = real_old;
7039		    }
7040		}
7041	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7042		{
7043		  rtx intermediate = second_reload_reg;
7044
7045		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7046						  new_class, mode)
7047		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7048						      ((enum insn_code)
7049						       sri2.icode)))
7050		    {
7051		      second_reload_reg = intermediate;
7052		      tertiary_icode = (enum insn_code) sri2.icode;
7053		    }
7054		  else
7055		    {
7056		      oldequiv = old;
7057		      real_oldequiv = real_old;
7058		    }
7059		}
7060	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7061		{
7062		  rtx intermediate = second_reload_reg;
7063
7064		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7065						  new_class, mode)
7066		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7067						      new_t_class, mode))
7068		    {
7069		      second_reload_reg = intermediate;
7070		      tertiary_icode = (enum insn_code) sri2.icode;
7071		    }
7072		  else
7073		    {
7074		      oldequiv = old;
7075		      real_oldequiv = real_old;
7076		    }
7077		}
7078	      else
7079		{
7080		  /* This could be handled more intelligently too.  */
7081		  oldequiv = old;
7082		  real_oldequiv = real_old;
7083		}
7084	    }
7085	}
7086
7087      /* If we still need a secondary reload register, check
7088	 to see if it is being used as a scratch or intermediate
7089	 register and generate code appropriately.  If we need
7090	 a scratch register, use REAL_OLDEQUIV since the form of
7091	 the insn may depend on the actual address if it is
7092	 a MEM.  */
7093
7094      if (second_reload_reg)
7095	{
7096	  if (icode != CODE_FOR_nothing)
7097	    {
7098	      /* We'd have to add extra code to handle this case.  */
7099	      gcc_assert (!third_reload_reg);
7100
7101	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7102					  second_reload_reg));
7103	      special = 1;
7104	    }
7105	  else
7106	    {
7107	      /* See if we need a scratch register to load the
7108		 intermediate register (a tertiary reload).  */
7109	      if (tertiary_icode != CODE_FOR_nothing)
7110		{
7111		  emit_insn ((GEN_FCN (tertiary_icode)
7112			      (second_reload_reg, real_oldequiv,
7113			       third_reload_reg)));
7114		}
7115	      else if (third_reload_reg)
7116		{
7117		  gen_reload (third_reload_reg, real_oldequiv,
7118			      rl->opnum,
7119			      rl->when_needed);
7120		  gen_reload (second_reload_reg, third_reload_reg,
7121			      rl->opnum,
7122			      rl->when_needed);
7123		}
7124	      else
7125		gen_reload (second_reload_reg, real_oldequiv,
7126			    rl->opnum,
7127			    rl->when_needed);
7128
7129	      oldequiv = second_reload_reg;
7130	    }
7131	}
7132    }
7133
7134  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7135    {
7136      rtx real_oldequiv = oldequiv;
7137
7138      if ((REG_P (oldequiv)
7139	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7140	   && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7141	       || reg_equiv_constant[REGNO (oldequiv)] != 0))
7142	  || (GET_CODE (oldequiv) == SUBREG
7143	      && REG_P (SUBREG_REG (oldequiv))
7144	      && (REGNO (SUBREG_REG (oldequiv))
7145		  >= FIRST_PSEUDO_REGISTER)
7146	      && ((reg_equiv_memory_loc
7147		   [REGNO (SUBREG_REG (oldequiv))] != 0)
7148		  || (reg_equiv_constant
7149		      [REGNO (SUBREG_REG (oldequiv))] != 0)))
7150	  || (CONSTANT_P (oldequiv)
7151	      && (PREFERRED_RELOAD_CLASS (oldequiv,
7152					  REGNO_REG_CLASS (REGNO (reloadreg)))
7153		  == NO_REGS)))
7154	real_oldequiv = rl->in;
7155      gen_reload (reloadreg, real_oldequiv, rl->opnum,
7156		  rl->when_needed);
7157    }
7158
7159  if (flag_non_call_exceptions)
7160    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7161
7162  /* End this sequence.  */
7163  *where = get_insns ();
7164  end_sequence ();
7165
7166  /* Update reload_override_in so that delete_address_reloads_1
7167     can see the actual register usage.  */
7168  if (oldequiv_reg)
7169    reload_override_in[j] = oldequiv;
7170}
7171
7172/* Generate insns to for the output reload RL, which is for the insn described
7173   by CHAIN and has the number J.  */
7174static void
7175emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7176			  int j)
7177{
7178  rtx reloadreg;
7179  rtx insn = chain->insn;
7180  int special = 0;
7181  rtx old = rl->out;
7182  enum machine_mode mode;
7183  rtx p;
7184  rtx rl_reg_rtx;
7185
7186  if (rl->when_needed == RELOAD_OTHER)
7187    start_sequence ();
7188  else
7189    push_to_sequence (output_reload_insns[rl->opnum]);
7190
7191  rl_reg_rtx = reload_reg_rtx_for_output[j];
7192  mode = GET_MODE (rl_reg_rtx);
7193
7194  reloadreg = rl_reg_rtx;
7195
7196  /* If we need two reload regs, set RELOADREG to the intermediate
7197     one, since it will be stored into OLD.  We might need a secondary
7198     register only for an input reload, so check again here.  */
7199
7200  if (rl->secondary_out_reload >= 0)
7201    {
7202      rtx real_old = old;
7203      int secondary_reload = rl->secondary_out_reload;
7204      int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7205
7206      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7207	  && reg_equiv_mem[REGNO (old)] != 0)
7208	real_old = reg_equiv_mem[REGNO (old)];
7209
7210      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7211	{
7212	  rtx second_reloadreg = reloadreg;
7213	  reloadreg = rld[secondary_reload].reg_rtx;
7214
7215	  /* See if RELOADREG is to be used as a scratch register
7216	     or as an intermediate register.  */
7217	  if (rl->secondary_out_icode != CODE_FOR_nothing)
7218	    {
7219	      /* We'd have to add extra code to handle this case.  */
7220	      gcc_assert (tertiary_reload < 0);
7221
7222	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
7223			  (real_old, second_reloadreg, reloadreg)));
7224	      special = 1;
7225	    }
7226	  else
7227	    {
7228	      /* See if we need both a scratch and intermediate reload
7229		 register.  */
7230
7231	      enum insn_code tertiary_icode
7232		= rld[secondary_reload].secondary_out_icode;
7233
7234	      /* We'd have to add more code for quartary reloads.  */
7235	      gcc_assert (tertiary_reload < 0
7236			  || rld[tertiary_reload].secondary_out_reload < 0);
7237
7238	      if (GET_MODE (reloadreg) != mode)
7239		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7240
7241	      if (tertiary_icode != CODE_FOR_nothing)
7242		{
7243		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7244		  rtx tem;
7245
7246		  /* Copy primary reload reg to secondary reload reg.
7247		     (Note that these have been swapped above, then
7248		     secondary reload reg to OLD using our insn.)  */
7249
7250		  /* If REAL_OLD is a paradoxical SUBREG, remove it
7251		     and try to put the opposite SUBREG on
7252		     RELOADREG.  */
7253		  if (GET_CODE (real_old) == SUBREG
7254		      && (GET_MODE_SIZE (GET_MODE (real_old))
7255			  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7256		      && 0 != (tem = gen_lowpart_common
7257			       (GET_MODE (SUBREG_REG (real_old)),
7258				reloadreg)))
7259		    real_old = SUBREG_REG (real_old), reloadreg = tem;
7260
7261		  gen_reload (reloadreg, second_reloadreg,
7262			      rl->opnum, rl->when_needed);
7263		  emit_insn ((GEN_FCN (tertiary_icode)
7264			      (real_old, reloadreg, third_reloadreg)));
7265		  special = 1;
7266		}
7267
7268	      else
7269		{
7270		  /* Copy between the reload regs here and then to
7271		     OUT later.  */
7272
7273		  gen_reload (reloadreg, second_reloadreg,
7274			      rl->opnum, rl->when_needed);
7275		  if (tertiary_reload >= 0)
7276		    {
7277		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7278
7279		      gen_reload (third_reloadreg, reloadreg,
7280				  rl->opnum, rl->when_needed);
7281		      reloadreg = third_reloadreg;
7282		    }
7283		}
7284	    }
7285	}
7286    }
7287
7288  /* Output the last reload insn.  */
7289  if (! special)
7290    {
7291      rtx set;
7292
7293      /* Don't output the last reload if OLD is not the dest of
7294	 INSN and is in the src and is clobbered by INSN.  */
7295      if (! flag_expensive_optimizations
7296	  || !REG_P (old)
7297	  || !(set = single_set (insn))
7298	  || rtx_equal_p (old, SET_DEST (set))
7299	  || !reg_mentioned_p (old, SET_SRC (set))
7300	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7301	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7302	gen_reload (old, reloadreg, rl->opnum,
7303		    rl->when_needed);
7304    }
7305
7306  /* Look at all insns we emitted, just to be safe.  */
7307  for (p = get_insns (); p; p = NEXT_INSN (p))
7308    if (INSN_P (p))
7309      {
7310	rtx pat = PATTERN (p);
7311
7312	/* If this output reload doesn't come from a spill reg,
7313	   clear any memory of reloaded copies of the pseudo reg.
7314	   If this output reload comes from a spill reg,
7315	   reg_has_output_reload will make this do nothing.  */
7316	note_stores (pat, forget_old_reloads_1, NULL);
7317
7318	if (reg_mentioned_p (rl_reg_rtx, pat))
7319	  {
7320	    rtx set = single_set (insn);
7321	    if (reload_spill_index[j] < 0
7322		&& set
7323		&& SET_SRC (set) == rl_reg_rtx)
7324	      {
7325		int src = REGNO (SET_SRC (set));
7326
7327		reload_spill_index[j] = src;
7328		SET_HARD_REG_BIT (reg_is_output_reload, src);
7329		if (find_regno_note (insn, REG_DEAD, src))
7330		  SET_HARD_REG_BIT (reg_reloaded_died, src);
7331	      }
7332	    if (HARD_REGISTER_P (rl_reg_rtx))
7333	      {
7334		int s = rl->secondary_out_reload;
7335		set = single_set (p);
7336		/* If this reload copies only to the secondary reload
7337		   register, the secondary reload does the actual
7338		   store.  */
7339		if (s >= 0 && set == NULL_RTX)
7340		  /* We can't tell what function the secondary reload
7341		     has and where the actual store to the pseudo is
7342		     made; leave new_spill_reg_store alone.  */
7343		  ;
7344		else if (s >= 0
7345			 && SET_SRC (set) == rl_reg_rtx
7346			 && SET_DEST (set) == rld[s].reg_rtx)
7347		  {
7348		    /* Usually the next instruction will be the
7349		       secondary reload insn;  if we can confirm
7350		       that it is, setting new_spill_reg_store to
7351		       that insn will allow an extra optimization.  */
7352		    rtx s_reg = rld[s].reg_rtx;
7353		    rtx next = NEXT_INSN (p);
7354		    rld[s].out = rl->out;
7355		    rld[s].out_reg = rl->out_reg;
7356		    set = single_set (next);
7357		    if (set && SET_SRC (set) == s_reg
7358			&& ! new_spill_reg_store[REGNO (s_reg)])
7359		      {
7360			SET_HARD_REG_BIT (reg_is_output_reload,
7361					  REGNO (s_reg));
7362			new_spill_reg_store[REGNO (s_reg)] = next;
7363		      }
7364		  }
7365		else
7366		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7367	      }
7368	  }
7369      }
7370
7371  if (rl->when_needed == RELOAD_OTHER)
7372    {
7373      emit_insn (other_output_reload_insns[rl->opnum]);
7374      other_output_reload_insns[rl->opnum] = get_insns ();
7375    }
7376  else
7377    output_reload_insns[rl->opnum] = get_insns ();
7378
7379  if (flag_non_call_exceptions)
7380    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7381
7382  end_sequence ();
7383}
7384
7385/* Do input reloading for reload RL, which is for the insn described by CHAIN
7386   and has the number J.  */
7387static void
7388do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7389{
7390  rtx insn = chain->insn;
7391  rtx old = (rl->in && MEM_P (rl->in)
7392	     ? rl->in_reg : rl->in);
7393  rtx reg_rtx = rl->reg_rtx;
7394
7395  if (old && reg_rtx)
7396    {
7397      enum machine_mode mode;
7398
7399      /* Determine the mode to reload in.
7400	 This is very tricky because we have three to choose from.
7401	 There is the mode the insn operand wants (rl->inmode).
7402	 There is the mode of the reload register RELOADREG.
7403	 There is the intrinsic mode of the operand, which we could find
7404	 by stripping some SUBREGs.
7405	 It turns out that RELOADREG's mode is irrelevant:
7406	 we can change that arbitrarily.
7407
7408	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7409	 then the reload reg may not support QImode moves, so use SImode.
7410	 If foo is in memory due to spilling a pseudo reg, this is safe,
7411	 because the QImode value is in the least significant part of a
7412	 slot big enough for a SImode.  If foo is some other sort of
7413	 memory reference, then it is impossible to reload this case,
7414	 so previous passes had better make sure this never happens.
7415
7416	 Then consider a one-word union which has SImode and one of its
7417	 members is a float, being fetched as (SUBREG:SF union:SI).
7418	 We must fetch that as SFmode because we could be loading into
7419	 a float-only register.  In this case OLD's mode is correct.
7420
7421	 Consider an immediate integer: it has VOIDmode.  Here we need
7422	 to get a mode from something else.
7423
7424	 In some cases, there is a fourth mode, the operand's
7425	 containing mode.  If the insn specifies a containing mode for
7426	 this operand, it overrides all others.
7427
7428	 I am not sure whether the algorithm here is always right,
7429	 but it does the right things in those cases.  */
7430
7431      mode = GET_MODE (old);
7432      if (mode == VOIDmode)
7433	mode = rl->inmode;
7434
7435      /* We cannot use gen_lowpart_common since it can do the wrong thing
7436	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
7437	 always be a REG here.  */
7438      if (GET_MODE (reg_rtx) != mode)
7439	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7440    }
7441  reload_reg_rtx_for_input[j] = reg_rtx;
7442
7443  if (old != 0
7444      /* AUTO_INC reloads need to be handled even if inherited.  We got an
7445	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
7446      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7447      && ! rtx_equal_p (reg_rtx, old)
7448      && reg_rtx != 0)
7449    emit_input_reload_insns (chain, rld + j, old, j);
7450
7451  /* When inheriting a wider reload, we have a MEM in rl->in,
7452     e.g. inheriting a SImode output reload for
7453     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
7454  if (optimize && reload_inherited[j] && rl->in
7455      && MEM_P (rl->in)
7456      && MEM_P (rl->in_reg)
7457      && reload_spill_index[j] >= 0
7458      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7459    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7460
7461  /* If we are reloading a register that was recently stored in with an
7462     output-reload, see if we can prove there was
7463     actually no need to store the old value in it.  */
7464
7465  if (optimize
7466      && (reload_inherited[j] || reload_override_in[j])
7467      && reg_rtx
7468      && REG_P (reg_rtx)
7469      && spill_reg_store[REGNO (reg_rtx)] != 0
7470#if 0
7471      /* There doesn't seem to be any reason to restrict this to pseudos
7472	 and doing so loses in the case where we are copying from a
7473	 register of the wrong class.  */
7474      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7475#endif
7476      /* The insn might have already some references to stackslots
7477	 replaced by MEMs, while reload_out_reg still names the
7478	 original pseudo.  */
7479      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7480	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7481    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7482}
7483
7484/* Do output reloading for reload RL, which is for the insn described by
7485   CHAIN and has the number J.
7486   ??? At some point we need to support handling output reloads of
7487   JUMP_INSNs or insns that set cc0.  */
7488static void
7489do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7490{
7491  rtx note, old;
7492  rtx insn = chain->insn;
7493  /* If this is an output reload that stores something that is
7494     not loaded in this same reload, see if we can eliminate a previous
7495     store.  */
7496  rtx pseudo = rl->out_reg;
7497  rtx reg_rtx = rl->reg_rtx;
7498
7499  if (rl->out && reg_rtx)
7500    {
7501      enum machine_mode mode;
7502
7503      /* Determine the mode to reload in.
7504	 See comments above (for input reloading).  */
7505      mode = GET_MODE (rl->out);
7506      if (mode == VOIDmode)
7507	{
7508	  /* VOIDmode should never happen for an output.  */
7509	  if (asm_noperands (PATTERN (insn)) < 0)
7510	    /* It's the compiler's fault.  */
7511	    fatal_insn ("VOIDmode on an output", insn);
7512	  error_for_asm (insn, "output operand is constant in %<asm%>");
7513	  /* Prevent crash--use something we know is valid.  */
7514	  mode = word_mode;
7515	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7516	}
7517      if (GET_MODE (reg_rtx) != mode)
7518	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7519    }
7520  reload_reg_rtx_for_output[j] = reg_rtx;
7521
7522  if (pseudo
7523      && optimize
7524      && REG_P (pseudo)
7525      && ! rtx_equal_p (rl->in_reg, pseudo)
7526      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7527      && reg_last_reload_reg[REGNO (pseudo)])
7528    {
7529      int pseudo_no = REGNO (pseudo);
7530      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7531
7532      /* We don't need to test full validity of last_regno for
7533	 inherit here; we only want to know if the store actually
7534	 matches the pseudo.  */
7535      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7536	  && reg_reloaded_contents[last_regno] == pseudo_no
7537	  && spill_reg_store[last_regno]
7538	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7539	delete_output_reload (insn, j, last_regno, reg_rtx);
7540    }
7541
7542  old = rl->out_reg;
7543  if (old == 0
7544      || reg_rtx == 0
7545      || rtx_equal_p (old, reg_rtx))
7546    return;
7547
7548  /* An output operand that dies right away does need a reload,
7549     but need not be copied from it.  Show the new location in the
7550     REG_UNUSED note.  */
7551  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7552      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7553    {
7554      XEXP (note, 0) = reg_rtx;
7555      return;
7556    }
7557  /* Likewise for a SUBREG of an operand that dies.  */
7558  else if (GET_CODE (old) == SUBREG
7559	   && REG_P (SUBREG_REG (old))
7560	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
7561					  SUBREG_REG (old))))
7562    {
7563      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7564      return;
7565    }
7566  else if (GET_CODE (old) == SCRATCH)
7567    /* If we aren't optimizing, there won't be a REG_UNUSED note,
7568       but we don't want to make an output reload.  */
7569    return;
7570
7571  /* If is a JUMP_INSN, we can't support output reloads yet.  */
7572  gcc_assert (NONJUMP_INSN_P (insn));
7573
7574  emit_output_reload_insns (chain, rld + j, j);
7575}
7576
7577/* A reload copies values of MODE from register SRC to register DEST.
7578   Return true if it can be treated for inheritance purposes like a
7579   group of reloads, each one reloading a single hard register.  The
7580   caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7581   occupy the same number of hard registers.  */
7582
7583static bool
7584inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7585		     int src ATTRIBUTE_UNUSED,
7586		     enum machine_mode mode ATTRIBUTE_UNUSED)
7587{
7588#ifdef CANNOT_CHANGE_MODE_CLASS
7589  return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7590	  && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7591#else
7592  return true;
7593#endif
7594}
7595
7596/* Output insns to reload values in and out of the chosen reload regs.  */
7597
7598static void
7599emit_reload_insns (struct insn_chain *chain)
7600{
7601  rtx insn = chain->insn;
7602
7603  int j;
7604
7605  CLEAR_HARD_REG_SET (reg_reloaded_died);
7606
7607  for (j = 0; j < reload_n_operands; j++)
7608    input_reload_insns[j] = input_address_reload_insns[j]
7609      = inpaddr_address_reload_insns[j]
7610      = output_reload_insns[j] = output_address_reload_insns[j]
7611      = outaddr_address_reload_insns[j]
7612      = other_output_reload_insns[j] = 0;
7613  other_input_address_reload_insns = 0;
7614  other_input_reload_insns = 0;
7615  operand_reload_insns = 0;
7616  other_operand_reload_insns = 0;
7617
7618  /* Dump reloads into the dump file.  */
7619  if (dump_file)
7620    {
7621      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7622      debug_reload_to_stream (dump_file);
7623    }
7624
7625  /* Now output the instructions to copy the data into and out of the
7626     reload registers.  Do these in the order that the reloads were reported,
7627     since reloads of base and index registers precede reloads of operands
7628     and the operands may need the base and index registers reloaded.  */
7629
7630  for (j = 0; j < n_reloads; j++)
7631    {
7632      if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
7633	{
7634	  unsigned int i;
7635
7636	  for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
7637	    new_spill_reg_store[i] = 0;
7638	}
7639
7640      do_input_reload (chain, rld + j, j);
7641      do_output_reload (chain, rld + j, j);
7642    }
7643
7644  /* Now write all the insns we made for reloads in the order expected by
7645     the allocation functions.  Prior to the insn being reloaded, we write
7646     the following reloads:
7647
7648     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7649
7650     RELOAD_OTHER reloads.
7651
7652     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7653     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7654     RELOAD_FOR_INPUT reload for the operand.
7655
7656     RELOAD_FOR_OPADDR_ADDRS reloads.
7657
7658     RELOAD_FOR_OPERAND_ADDRESS reloads.
7659
7660     After the insn being reloaded, we write the following:
7661
7662     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7663     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7664     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7665     reloads for the operand.  The RELOAD_OTHER output reloads are
7666     output in descending order by reload number.  */
7667
7668  emit_insn_before (other_input_address_reload_insns, insn);
7669  emit_insn_before (other_input_reload_insns, insn);
7670
7671  for (j = 0; j < reload_n_operands; j++)
7672    {
7673      emit_insn_before (inpaddr_address_reload_insns[j], insn);
7674      emit_insn_before (input_address_reload_insns[j], insn);
7675      emit_insn_before (input_reload_insns[j], insn);
7676    }
7677
7678  emit_insn_before (other_operand_reload_insns, insn);
7679  emit_insn_before (operand_reload_insns, insn);
7680
7681  for (j = 0; j < reload_n_operands; j++)
7682    {
7683      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7684      x = emit_insn_after (output_address_reload_insns[j], x);
7685      x = emit_insn_after (output_reload_insns[j], x);
7686      emit_insn_after (other_output_reload_insns[j], x);
7687    }
7688
7689  /* For all the spill regs newly reloaded in this instruction,
7690     record what they were reloaded from, so subsequent instructions
7691     can inherit the reloads.
7692
7693     Update spill_reg_store for the reloads of this insn.
7694     Copy the elements that were updated in the loop above.  */
7695
7696  for (j = 0; j < n_reloads; j++)
7697    {
7698      int r = reload_order[j];
7699      int i = reload_spill_index[r];
7700
7701      /* If this is a non-inherited input reload from a pseudo, we must
7702	 clear any memory of a previous store to the same pseudo.  Only do
7703	 something if there will not be an output reload for the pseudo
7704	 being reloaded.  */
7705      if (rld[r].in_reg != 0
7706	  && ! (reload_inherited[r] || reload_override_in[r]))
7707	{
7708	  rtx reg = rld[r].in_reg;
7709
7710	  if (GET_CODE (reg) == SUBREG)
7711	    reg = SUBREG_REG (reg);
7712
7713	  if (REG_P (reg)
7714	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7715	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
7716	    {
7717	      int nregno = REGNO (reg);
7718
7719	      if (reg_last_reload_reg[nregno])
7720		{
7721		  int last_regno = REGNO (reg_last_reload_reg[nregno]);
7722
7723		  if (reg_reloaded_contents[last_regno] == nregno)
7724		    spill_reg_store[last_regno] = 0;
7725		}
7726	    }
7727	}
7728
7729      /* I is nonneg if this reload used a register.
7730	 If rld[r].reg_rtx is 0, this is an optional reload
7731	 that we opted to ignore.  */
7732
7733      if (i >= 0 && rld[r].reg_rtx != 0)
7734	{
7735	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
7736	  int k;
7737
7738	  /* For a multi register reload, we need to check if all or part
7739	     of the value lives to the end.  */
7740	  for (k = 0; k < nr; k++)
7741	    if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7742					  rld[r].when_needed))
7743	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7744
7745	  /* Maybe the spill reg contains a copy of reload_out.  */
7746	  if (rld[r].out != 0
7747	      && (REG_P (rld[r].out)
7748		  || (rld[r].out_reg
7749		      ? REG_P (rld[r].out_reg)
7750		      /* The reload value is an auto-modification of
7751			 some kind.  For PRE_INC, POST_INC, PRE_DEC
7752			 and POST_DEC, we record an equivalence
7753			 between the reload register and the operand
7754			 on the optimistic assumption that we can make
7755			 the equivalence hold.  reload_as_needed must
7756			 then either make it hold or invalidate the
7757			 equivalence.
7758
7759			 PRE_MODIFY and POST_MODIFY addresses are reloaded
7760			 somewhat differently, and allowing them here leads
7761			 to problems.  */
7762		      : (GET_CODE (rld[r].out) != POST_MODIFY
7763			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
7764	    {
7765	      rtx reg;
7766	      enum machine_mode mode;
7767	      int regno, nregs;
7768
7769	      reg = reload_reg_rtx_for_output[r];
7770	      mode = GET_MODE (reg);
7771	      regno = REGNO (reg);
7772	      nregs = hard_regno_nregs[regno][mode];
7773	      if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7774					   rld[r].when_needed))
7775		{
7776		  rtx out = (REG_P (rld[r].out)
7777			     ? rld[r].out
7778			     : rld[r].out_reg
7779			     ? rld[r].out_reg
7780/* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
7781		  int out_regno = REGNO (out);
7782		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
7783				   : hard_regno_nregs[out_regno][mode]);
7784		  bool piecemeal;
7785
7786		  spill_reg_store[regno] = new_spill_reg_store[regno];
7787		  spill_reg_stored_to[regno] = out;
7788		  reg_last_reload_reg[out_regno] = reg;
7789
7790		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
7791			       && nregs == out_nregs
7792			       && inherit_piecemeal_p (out_regno, regno, mode));
7793
7794		  /* If OUT_REGNO is a hard register, it may occupy more than
7795		     one register.  If it does, say what is in the
7796		     rest of the registers assuming that both registers
7797		     agree on how many words the object takes.  If not,
7798		     invalidate the subsequent registers.  */
7799
7800		  if (HARD_REGISTER_NUM_P (out_regno))
7801		    for (k = 1; k < out_nregs; k++)
7802		      reg_last_reload_reg[out_regno + k]
7803			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
7804
7805		  /* Now do the inverse operation.  */
7806		  for (k = 0; k < nregs; k++)
7807		    {
7808		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
7809		      reg_reloaded_contents[regno + k]
7810			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
7811			   ? out_regno
7812			   : out_regno + k);
7813		      reg_reloaded_insn[regno + k] = insn;
7814		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
7815		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
7816			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7817					  regno + k);
7818		      else
7819			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7820					    regno + k);
7821		    }
7822		}
7823	    }
7824	  /* Maybe the spill reg contains a copy of reload_in.  Only do
7825	     something if there will not be an output reload for
7826	     the register being reloaded.  */
7827	  else if (rld[r].out_reg == 0
7828		   && rld[r].in != 0
7829		   && ((REG_P (rld[r].in)
7830			&& !HARD_REGISTER_P (rld[r].in)
7831			&& !REGNO_REG_SET_P (&reg_has_output_reload,
7832					     REGNO (rld[r].in)))
7833		       || (REG_P (rld[r].in_reg)
7834			   && !REGNO_REG_SET_P (&reg_has_output_reload,
7835						REGNO (rld[r].in_reg))))
7836		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
7837	    {
7838	      rtx reg;
7839	      enum machine_mode mode;
7840	      int regno, nregs;
7841
7842	      reg = reload_reg_rtx_for_input[r];
7843	      mode = GET_MODE (reg);
7844	      regno = REGNO (reg);
7845	      nregs = hard_regno_nregs[regno][mode];
7846	      if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
7847					   rld[r].when_needed))
7848		{
7849		  int in_regno;
7850		  int in_nregs;
7851		  rtx in;
7852		  bool piecemeal;
7853
7854		  if (REG_P (rld[r].in)
7855		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7856		    in = rld[r].in;
7857		  else if (REG_P (rld[r].in_reg))
7858		    in = rld[r].in_reg;
7859		  else
7860		    in = XEXP (rld[r].in_reg, 0);
7861		  in_regno = REGNO (in);
7862
7863		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
7864			      : hard_regno_nregs[in_regno][mode]);
7865
7866		  reg_last_reload_reg[in_regno] = reg;
7867
7868		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
7869			       && nregs == in_nregs
7870			       && inherit_piecemeal_p (regno, in_regno, mode));
7871
7872		  if (HARD_REGISTER_NUM_P (in_regno))
7873		    for (k = 1; k < in_nregs; k++)
7874		      reg_last_reload_reg[in_regno + k]
7875			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
7876
7877		  /* Unless we inherited this reload, show we haven't
7878		     recently done a store.
7879		     Previous stores of inherited auto_inc expressions
7880		     also have to be discarded.  */
7881		  if (! reload_inherited[r]
7882		      || (rld[r].out && ! rld[r].out_reg))
7883		    spill_reg_store[regno] = 0;
7884
7885		  for (k = 0; k < nregs; k++)
7886		    {
7887		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
7888		      reg_reloaded_contents[regno + k]
7889			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
7890			   ? in_regno
7891			   : in_regno + k);
7892		      reg_reloaded_insn[regno + k] = insn;
7893		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
7894		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
7895			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7896					  regno + k);
7897		      else
7898			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7899					    regno + k);
7900		    }
7901		}
7902	    }
7903	}
7904
7905      /* The following if-statement was #if 0'd in 1.34 (or before...).
7906	 It's reenabled in 1.35 because supposedly nothing else
7907	 deals with this problem.  */
7908
7909      /* If a register gets output-reloaded from a non-spill register,
7910	 that invalidates any previous reloaded copy of it.
7911	 But forget_old_reloads_1 won't get to see it, because
7912	 it thinks only about the original insn.  So invalidate it here.
7913	 Also do the same thing for RELOAD_OTHER constraints where the
7914	 output is discarded.  */
7915      if (i < 0
7916	  && ((rld[r].out != 0
7917	       && (REG_P (rld[r].out)
7918		   || (MEM_P (rld[r].out)
7919		       && REG_P (rld[r].out_reg))))
7920	      || (rld[r].out == 0 && rld[r].out_reg
7921		  && REG_P (rld[r].out_reg))))
7922	{
7923	  rtx out = ((rld[r].out && REG_P (rld[r].out))
7924		     ? rld[r].out : rld[r].out_reg);
7925	  int out_regno = REGNO (out);
7926	  enum machine_mode mode = GET_MODE (out);
7927
7928	  /* REG_RTX is now set or clobbered by the main instruction.
7929	     As the comment above explains, forget_old_reloads_1 only
7930	     sees the original instruction, and there is no guarantee
7931	     that the original instruction also clobbered REG_RTX.
7932	     For example, if find_reloads sees that the input side of
7933	     a matched operand pair dies in this instruction, it may
7934	     use the input register as the reload register.
7935
7936	     Calling forget_old_reloads_1 is a waste of effort if
7937	     REG_RTX is also the output register.
7938
7939	     If we know that REG_RTX holds the value of a pseudo
7940	     register, the code after the call will record that fact.  */
7941	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
7942	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
7943
7944	  if (!HARD_REGISTER_NUM_P (out_regno))
7945	    {
7946	      rtx src_reg, store_insn = NULL_RTX;
7947
7948	      reg_last_reload_reg[out_regno] = 0;
7949
7950	      /* If we can find a hard register that is stored, record
7951		 the storing insn so that we may delete this insn with
7952		 delete_output_reload.  */
7953	      src_reg = reload_reg_rtx_for_output[r];
7954
7955	      /* If this is an optional reload, try to find the source reg
7956		 from an input reload.  */
7957	      if (! src_reg)
7958		{
7959		  rtx set = single_set (insn);
7960		  if (set && SET_DEST (set) == rld[r].out)
7961		    {
7962		      int k;
7963
7964		      src_reg = SET_SRC (set);
7965		      store_insn = insn;
7966		      for (k = 0; k < n_reloads; k++)
7967			{
7968			  if (rld[k].in == src_reg)
7969			    {
7970			      src_reg = reload_reg_rtx_for_input[k];
7971			      break;
7972			    }
7973			}
7974		    }
7975		}
7976	      else
7977		store_insn = new_spill_reg_store[REGNO (src_reg)];
7978	      if (src_reg && REG_P (src_reg)
7979		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7980		{
7981		  int src_regno, src_nregs, k;
7982		  rtx note;
7983
7984		  gcc_assert (GET_MODE (src_reg) == mode);
7985		  src_regno = REGNO (src_reg);
7986		  src_nregs = hard_regno_nregs[src_regno][mode];
7987		  /* The place where to find a death note varies with
7988		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
7989		     necessarily checked exactly in the code that moves
7990		     notes, so just check both locations.  */
7991		  note = find_regno_note (insn, REG_DEAD, src_regno);
7992		  if (! note && store_insn)
7993		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
7994		  for (k = 0; k < src_nregs; k++)
7995		    {
7996		      spill_reg_store[src_regno + k] = store_insn;
7997		      spill_reg_stored_to[src_regno + k] = out;
7998		      reg_reloaded_contents[src_regno + k] = out_regno;
7999		      reg_reloaded_insn[src_regno + k] = store_insn;
8000		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8001		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8002		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8003							  mode))
8004			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8005					  src_regno + k);
8006		      else
8007			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8008					    src_regno + k);
8009		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8010		      if (note)
8011			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8012		      else
8013			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8014		    }
8015		  reg_last_reload_reg[out_regno] = src_reg;
8016		  /* We have to set reg_has_output_reload here, or else
8017		     forget_old_reloads_1 will clear reg_last_reload_reg
8018		     right away.  */
8019		  SET_REGNO_REG_SET (&reg_has_output_reload,
8020				     out_regno);
8021		}
8022	    }
8023	  else
8024	    {
8025	      int k, out_nregs = hard_regno_nregs[out_regno][mode];
8026
8027	      for (k = 0; k < out_nregs; k++)
8028		reg_last_reload_reg[out_regno + k] = 0;
8029	    }
8030	}
8031    }
8032  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8033}
8034
8035/* Go through the motions to emit INSN and test if it is strictly valid.
8036   Return the emitted insn if valid, else return NULL.  */
8037
8038static rtx
8039emit_insn_if_valid_for_reload (rtx insn)
8040{
8041  rtx last = get_last_insn ();
8042  int code;
8043
8044  insn = emit_insn (insn);
8045  code = recog_memoized (insn);
8046
8047  if (code >= 0)
8048    {
8049      extract_insn (insn);
8050      /* We want constrain operands to treat this insn strictly in its
8051	 validity determination, i.e., the way it would after reload has
8052	 completed.  */
8053      if (constrain_operands (1))
8054	return insn;
8055    }
8056
8057  delete_insns_since (last);
8058  return NULL;
8059}
8060
8061/* Emit code to perform a reload from IN (which may be a reload register) to
8062   OUT (which may also be a reload register).  IN or OUT is from operand
8063   OPNUM with reload type TYPE.
8064
8065   Returns first insn emitted.  */
8066
8067static rtx
8068gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8069{
8070  rtx last = get_last_insn ();
8071  rtx tem;
8072
8073  /* If IN is a paradoxical SUBREG, remove it and try to put the
8074     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
8075  if (GET_CODE (in) == SUBREG
8076      && (GET_MODE_SIZE (GET_MODE (in))
8077	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
8078      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
8079    in = SUBREG_REG (in), out = tem;
8080  else if (GET_CODE (out) == SUBREG
8081	   && (GET_MODE_SIZE (GET_MODE (out))
8082	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
8083	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
8084    out = SUBREG_REG (out), in = tem;
8085
8086  /* How to do this reload can get quite tricky.  Normally, we are being
8087     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8088     register that didn't get a hard register.  In that case we can just
8089     call emit_move_insn.
8090
8091     We can also be asked to reload a PLUS that adds a register or a MEM to
8092     another register, constant or MEM.  This can occur during frame pointer
8093     elimination and while reloading addresses.  This case is handled by
8094     trying to emit a single insn to perform the add.  If it is not valid,
8095     we use a two insn sequence.
8096
8097     Or we can be asked to reload an unary operand that was a fragment of
8098     an addressing mode, into a register.  If it isn't recognized as-is,
8099     we try making the unop operand and the reload-register the same:
8100     (set reg:X (unop:X expr:Y))
8101     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8102
8103     Finally, we could be called to handle an 'o' constraint by putting
8104     an address into a register.  In that case, we first try to do this
8105     with a named pattern of "reload_load_address".  If no such pattern
8106     exists, we just emit a SET insn and hope for the best (it will normally
8107     be valid on machines that use 'o').
8108
8109     This entire process is made complex because reload will never
8110     process the insns we generate here and so we must ensure that
8111     they will fit their constraints and also by the fact that parts of
8112     IN might be being reloaded separately and replaced with spill registers.
8113     Because of this, we are, in some sense, just guessing the right approach
8114     here.  The one listed above seems to work.
8115
8116     ??? At some point, this whole thing needs to be rethought.  */
8117
8118  if (GET_CODE (in) == PLUS
8119      && (REG_P (XEXP (in, 0))
8120	  || GET_CODE (XEXP (in, 0)) == SUBREG
8121	  || MEM_P (XEXP (in, 0)))
8122      && (REG_P (XEXP (in, 1))
8123	  || GET_CODE (XEXP (in, 1)) == SUBREG
8124	  || CONSTANT_P (XEXP (in, 1))
8125	  || MEM_P (XEXP (in, 1))))
8126    {
8127      /* We need to compute the sum of a register or a MEM and another
8128	 register, constant, or MEM, and put it into the reload
8129	 register.  The best possible way of doing this is if the machine
8130	 has a three-operand ADD insn that accepts the required operands.
8131
8132	 The simplest approach is to try to generate such an insn and see if it
8133	 is recognized and matches its constraints.  If so, it can be used.
8134
8135	 It might be better not to actually emit the insn unless it is valid,
8136	 but we need to pass the insn as an operand to `recog' and
8137	 `extract_insn' and it is simpler to emit and then delete the insn if
8138	 not valid than to dummy things up.  */
8139
8140      rtx op0, op1, tem, insn;
8141      int code;
8142
8143      op0 = find_replacement (&XEXP (in, 0));
8144      op1 = find_replacement (&XEXP (in, 1));
8145
8146      /* Since constraint checking is strict, commutativity won't be
8147	 checked, so we need to do that here to avoid spurious failure
8148	 if the add instruction is two-address and the second operand
8149	 of the add is the same as the reload reg, which is frequently
8150	 the case.  If the insn would be A = B + A, rearrange it so
8151	 it will be A = A + B as constrain_operands expects.  */
8152
8153      if (REG_P (XEXP (in, 1))
8154	  && REGNO (out) == REGNO (XEXP (in, 1)))
8155	tem = op0, op0 = op1, op1 = tem;
8156
8157      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8158	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8159
8160      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8161      if (insn)
8162	return insn;
8163
8164      /* If that failed, we must use a conservative two-insn sequence.
8165
8166	 Use a move to copy one operand into the reload register.  Prefer
8167	 to reload a constant, MEM or pseudo since the move patterns can
8168	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
8169	 pseudo and OP1 is not a valid operand for an add instruction, then
8170	 reload OP1.
8171
8172	 After reloading one of the operands into the reload register, add
8173	 the reload register to the output register.
8174
8175	 If there is another way to do this for a specific machine, a
8176	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8177	 we emit below.  */
8178
8179      code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8180
8181      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8182	  || (REG_P (op1)
8183	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8184	  || (code != CODE_FOR_nothing
8185	      && ! ((*insn_data[code].operand[2].predicate)
8186		    (op1, insn_data[code].operand[2].mode))))
8187	tem = op0, op0 = op1, op1 = tem;
8188
8189      gen_reload (out, op0, opnum, type);
8190
8191      /* If OP0 and OP1 are the same, we can use OUT for OP1.
8192	 This fixes a problem on the 32K where the stack pointer cannot
8193	 be used as an operand of an add insn.  */
8194
8195      if (rtx_equal_p (op0, op1))
8196	op1 = out;
8197
8198      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8199      if (insn)
8200	{
8201	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
8202	  set_unique_reg_note (insn, REG_EQUIV, in);
8203	  return insn;
8204	}
8205
8206      /* If that failed, copy the address register to the reload register.
8207	 Then add the constant to the reload register.  */
8208
8209      gcc_assert (!reg_overlap_mentioned_p (out, op0));
8210      gen_reload (out, op1, opnum, type);
8211      insn = emit_insn (gen_add2_insn (out, op0));
8212      set_unique_reg_note (insn, REG_EQUIV, in);
8213    }
8214
8215#ifdef SECONDARY_MEMORY_NEEDED
8216  /* If we need a memory location to do the move, do it that way.  */
8217  else if ((REG_P (in)
8218            || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
8219	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8220	   && (REG_P (out)
8221	       || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
8222	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8223	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8224				       REGNO_REG_CLASS (reg_or_subregno (out)),
8225				       GET_MODE (out)))
8226    {
8227      /* Get the memory to use and rewrite both registers to its mode.  */
8228      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8229
8230      if (GET_MODE (loc) != GET_MODE (out))
8231	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8232
8233      if (GET_MODE (loc) != GET_MODE (in))
8234	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8235
8236      gen_reload (loc, in, opnum, type);
8237      gen_reload (out, loc, opnum, type);
8238    }
8239#endif
8240  else if (REG_P (out) && UNARY_P (in))
8241    {
8242      rtx insn;
8243      rtx op1;
8244      rtx out_moded;
8245      rtx set;
8246
8247      op1 = find_replacement (&XEXP (in, 0));
8248      if (op1 != XEXP (in, 0))
8249	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8250
8251      /* First, try a plain SET.  */
8252      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8253      if (set)
8254	return set;
8255
8256      /* If that failed, move the inner operand to the reload
8257	 register, and try the same unop with the inner expression
8258	 replaced with the reload register.  */
8259
8260      if (GET_MODE (op1) != GET_MODE (out))
8261	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8262      else
8263	out_moded = out;
8264
8265      gen_reload (out_moded, op1, opnum, type);
8266
8267      insn
8268	= gen_rtx_SET (VOIDmode, out,
8269		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8270				      out_moded));
8271      insn = emit_insn_if_valid_for_reload (insn);
8272      if (insn)
8273	{
8274	  set_unique_reg_note (insn, REG_EQUIV, in);
8275	  return insn;
8276	}
8277
8278      fatal_insn ("Failure trying to reload:", set);
8279    }
8280  /* If IN is a simple operand, use gen_move_insn.  */
8281  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8282    {
8283      tem = emit_insn (gen_move_insn (out, in));
8284      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
8285      mark_jump_label (in, tem, 0);
8286    }
8287
8288#ifdef HAVE_reload_load_address
8289  else if (HAVE_reload_load_address)
8290    emit_insn (gen_reload_load_address (out, in));
8291#endif
8292
8293  /* Otherwise, just write (set OUT IN) and hope for the best.  */
8294  else
8295    emit_insn (gen_rtx_SET (VOIDmode, out, in));
8296
8297  /* Return the first insn emitted.
8298     We can not just return get_last_insn, because there may have
8299     been multiple instructions emitted.  Also note that gen_move_insn may
8300     emit more than one insn itself, so we can not assume that there is one
8301     insn emitted per emit_insn_before call.  */
8302
8303  return last ? NEXT_INSN (last) : get_insns ();
8304}
8305
8306/* Delete a previously made output-reload whose result we now believe
8307   is not needed.  First we double-check.
8308
8309   INSN is the insn now being processed.
8310   LAST_RELOAD_REG is the hard register number for which we want to delete
8311   the last output reload.
8312   J is the reload-number that originally used REG.  The caller has made
8313   certain that reload J doesn't use REG any longer for input.
8314   NEW_RELOAD_REG is reload register that reload J is using for REG.  */
8315
8316static void
8317delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8318{
8319  rtx output_reload_insn = spill_reg_store[last_reload_reg];
8320  rtx reg = spill_reg_stored_to[last_reload_reg];
8321  int k;
8322  int n_occurrences;
8323  int n_inherited = 0;
8324  rtx i1;
8325  rtx substed;
8326  unsigned regno;
8327  int nregs;
8328
8329  /* It is possible that this reload has been only used to set another reload
8330     we eliminated earlier and thus deleted this instruction too.  */
8331  if (INSN_DELETED_P (output_reload_insn))
8332    return;
8333
8334  /* Get the raw pseudo-register referred to.  */
8335
8336  while (GET_CODE (reg) == SUBREG)
8337    reg = SUBREG_REG (reg);
8338  substed = reg_equiv_memory_loc[REGNO (reg)];
8339
8340  /* This is unsafe if the operand occurs more often in the current
8341     insn than it is inherited.  */
8342  for (k = n_reloads - 1; k >= 0; k--)
8343    {
8344      rtx reg2 = rld[k].in;
8345      if (! reg2)
8346	continue;
8347      if (MEM_P (reg2) || reload_override_in[k])
8348	reg2 = rld[k].in_reg;
8349#ifdef AUTO_INC_DEC
8350      if (rld[k].out && ! rld[k].out_reg)
8351	reg2 = XEXP (rld[k].in_reg, 0);
8352#endif
8353      while (GET_CODE (reg2) == SUBREG)
8354	reg2 = SUBREG_REG (reg2);
8355      if (rtx_equal_p (reg2, reg))
8356	{
8357	  if (reload_inherited[k] || reload_override_in[k] || k == j)
8358	    n_inherited++;
8359	  else
8360	    return;
8361	}
8362    }
8363  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8364  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8365    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8366					reg, 0);
8367  if (substed)
8368    n_occurrences += count_occurrences (PATTERN (insn),
8369					eliminate_regs (substed, VOIDmode,
8370							NULL_RTX), 0);
8371  for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8372    {
8373      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8374      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8375    }
8376  if (n_occurrences > n_inherited)
8377    return;
8378
8379  regno = REGNO (reg);
8380  if (regno >= FIRST_PSEUDO_REGISTER)
8381    nregs = 1;
8382  else
8383    nregs = hard_regno_nregs[regno][GET_MODE (reg)];
8384
8385  /* If the pseudo-reg we are reloading is no longer referenced
8386     anywhere between the store into it and here,
8387     and we're within the same basic block, then the value can only
8388     pass through the reload reg and end up here.
8389     Otherwise, give up--return.  */
8390  for (i1 = NEXT_INSN (output_reload_insn);
8391       i1 != insn; i1 = NEXT_INSN (i1))
8392    {
8393      if (NOTE_INSN_BASIC_BLOCK_P (i1))
8394	return;
8395      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8396	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
8397	{
8398	  /* If this is USE in front of INSN, we only have to check that
8399	     there are no more references than accounted for by inheritance.  */
8400	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8401	    {
8402	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8403	      i1 = NEXT_INSN (i1);
8404	    }
8405	  if (n_occurrences <= n_inherited && i1 == insn)
8406	    break;
8407	  return;
8408	}
8409    }
8410
8411  /* We will be deleting the insn.  Remove the spill reg information.  */
8412  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8413    {
8414      spill_reg_store[last_reload_reg + k] = 0;
8415      spill_reg_stored_to[last_reload_reg + k] = 0;
8416    }
8417
8418  /* The caller has already checked that REG dies or is set in INSN.
8419     It has also checked that we are optimizing, and thus some
8420     inaccuracies in the debugging information are acceptable.
8421     So we could just delete output_reload_insn.  But in some cases
8422     we can improve the debugging information without sacrificing
8423     optimization - maybe even improving the code: See if the pseudo
8424     reg has been completely replaced with reload regs.  If so, delete
8425     the store insn and forget we had a stack slot for the pseudo.  */
8426  if (rld[j].out != rld[j].in
8427      && REG_N_DEATHS (REGNO (reg)) == 1
8428      && REG_N_SETS (REGNO (reg)) == 1
8429      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8430      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8431    {
8432      rtx i2;
8433
8434      /* We know that it was used only between here and the beginning of
8435	 the current basic block.  (We also know that the last use before
8436	 INSN was the output reload we are thinking of deleting, but never
8437	 mind that.)  Search that range; see if any ref remains.  */
8438      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8439	{
8440	  rtx set = single_set (i2);
8441
8442	  /* Uses which just store in the pseudo don't count,
8443	     since if they are the only uses, they are dead.  */
8444	  if (set != 0 && SET_DEST (set) == reg)
8445	    continue;
8446	  if (LABEL_P (i2)
8447	      || JUMP_P (i2))
8448	    break;
8449	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8450	      && reg_mentioned_p (reg, PATTERN (i2)))
8451	    {
8452	      /* Some other ref remains; just delete the output reload we
8453		 know to be dead.  */
8454	      delete_address_reloads (output_reload_insn, insn);
8455	      delete_insn (output_reload_insn);
8456	      return;
8457	    }
8458	}
8459
8460      /* Delete the now-dead stores into this pseudo.  Note that this
8461	 loop also takes care of deleting output_reload_insn.  */
8462      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8463	{
8464	  rtx set = single_set (i2);
8465
8466	  if (set != 0 && SET_DEST (set) == reg)
8467	    {
8468	      delete_address_reloads (i2, insn);
8469	      delete_insn (i2);
8470	    }
8471	  if (LABEL_P (i2)
8472	      || JUMP_P (i2))
8473	    break;
8474	}
8475
8476      /* For the debugging info, say the pseudo lives in this reload reg.  */
8477      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8478      if (ira_conflicts_p)
8479	/* Inform IRA about the change.  */
8480	ira_mark_allocation_change (REGNO (reg));
8481      alter_reg (REGNO (reg), -1, false);
8482    }
8483  else
8484    {
8485      delete_address_reloads (output_reload_insn, insn);
8486      delete_insn (output_reload_insn);
8487    }
8488}
8489
8490/* We are going to delete DEAD_INSN.  Recursively delete loads of
8491   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8492   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8493static void
8494delete_address_reloads (rtx dead_insn, rtx current_insn)
8495{
8496  rtx set = single_set (dead_insn);
8497  rtx set2, dst, prev, next;
8498  if (set)
8499    {
8500      rtx dst = SET_DEST (set);
8501      if (MEM_P (dst))
8502	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8503    }
8504  /* If we deleted the store from a reloaded post_{in,de}c expression,
8505     we can delete the matching adds.  */
8506  prev = PREV_INSN (dead_insn);
8507  next = NEXT_INSN (dead_insn);
8508  if (! prev || ! next)
8509    return;
8510  set = single_set (next);
8511  set2 = single_set (prev);
8512  if (! set || ! set2
8513      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8514      || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8515      || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8516    return;
8517  dst = SET_DEST (set);
8518  if (! rtx_equal_p (dst, SET_DEST (set2))
8519      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8520      || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8521      || (INTVAL (XEXP (SET_SRC (set), 1))
8522	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
8523    return;
8524  delete_related_insns (prev);
8525  delete_related_insns (next);
8526}
8527
8528/* Subfunction of delete_address_reloads: process registers found in X.  */
8529static void
8530delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
8531{
8532  rtx prev, set, dst, i2;
8533  int i, j;
8534  enum rtx_code code = GET_CODE (x);
8535
8536  if (code != REG)
8537    {
8538      const char *fmt = GET_RTX_FORMAT (code);
8539      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8540	{
8541	  if (fmt[i] == 'e')
8542	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8543	  else if (fmt[i] == 'E')
8544	    {
8545	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8546		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8547					  current_insn);
8548	    }
8549	}
8550      return;
8551    }
8552
8553  if (spill_reg_order[REGNO (x)] < 0)
8554    return;
8555
8556  /* Scan backwards for the insn that sets x.  This might be a way back due
8557     to inheritance.  */
8558  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8559    {
8560      code = GET_CODE (prev);
8561      if (code == CODE_LABEL || code == JUMP_INSN)
8562	return;
8563      if (!INSN_P (prev))
8564	continue;
8565      if (reg_set_p (x, PATTERN (prev)))
8566	break;
8567      if (reg_referenced_p (x, PATTERN (prev)))
8568	return;
8569    }
8570  if (! prev || INSN_UID (prev) < reload_first_uid)
8571    return;
8572  /* Check that PREV only sets the reload register.  */
8573  set = single_set (prev);
8574  if (! set)
8575    return;
8576  dst = SET_DEST (set);
8577  if (!REG_P (dst)
8578      || ! rtx_equal_p (dst, x))
8579    return;
8580  if (! reg_set_p (dst, PATTERN (dead_insn)))
8581    {
8582      /* Check if DST was used in a later insn -
8583	 it might have been inherited.  */
8584      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8585	{
8586	  if (LABEL_P (i2))
8587	    break;
8588	  if (! INSN_P (i2))
8589	    continue;
8590	  if (reg_referenced_p (dst, PATTERN (i2)))
8591	    {
8592	      /* If there is a reference to the register in the current insn,
8593		 it might be loaded in a non-inherited reload.  If no other
8594		 reload uses it, that means the register is set before
8595		 referenced.  */
8596	      if (i2 == current_insn)
8597		{
8598		  for (j = n_reloads - 1; j >= 0; j--)
8599		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
8600			|| reload_override_in[j] == dst)
8601		      return;
8602		  for (j = n_reloads - 1; j >= 0; j--)
8603		    if (rld[j].in && rld[j].reg_rtx == dst)
8604		      break;
8605		  if (j >= 0)
8606		    break;
8607		}
8608	      return;
8609	    }
8610	  if (JUMP_P (i2))
8611	    break;
8612	  /* If DST is still live at CURRENT_INSN, check if it is used for
8613	     any reload.  Note that even if CURRENT_INSN sets DST, we still
8614	     have to check the reloads.  */
8615	  if (i2 == current_insn)
8616	    {
8617	      for (j = n_reloads - 1; j >= 0; j--)
8618		if ((rld[j].reg_rtx == dst && reload_inherited[j])
8619		    || reload_override_in[j] == dst)
8620		  return;
8621	      /* ??? We can't finish the loop here, because dst might be
8622		 allocated to a pseudo in this block if no reload in this
8623		 block needs any of the classes containing DST - see
8624		 spill_hard_reg.  There is no easy way to tell this, so we
8625		 have to scan till the end of the basic block.  */
8626	    }
8627	  if (reg_set_p (dst, PATTERN (i2)))
8628	    break;
8629	}
8630    }
8631  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8632  reg_reloaded_contents[REGNO (dst)] = -1;
8633  delete_insn (prev);
8634}
8635
8636/* Output reload-insns to reload VALUE into RELOADREG.
8637   VALUE is an autoincrement or autodecrement RTX whose operand
8638   is a register or memory location;
8639   so reloading involves incrementing that location.
8640   IN is either identical to VALUE, or some cheaper place to reload from.
8641
8642   INC_AMOUNT is the number to increment or decrement by (always positive).
8643   This cannot be deduced from VALUE.  */
8644
8645static void
8646inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
8647{
8648  /* REG or MEM to be copied and incremented.  */
8649  rtx incloc = find_replacement (&XEXP (value, 0));
8650  /* Nonzero if increment after copying.  */
8651  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
8652	      || GET_CODE (value) == POST_MODIFY);
8653  rtx last;
8654  rtx inc;
8655  rtx add_insn;
8656  int code;
8657  rtx real_in = in == value ? incloc : in;
8658
8659  /* No hard register is equivalent to this register after
8660     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
8661     we could inc/dec that register as well (maybe even using it for
8662     the source), but I'm not sure it's worth worrying about.  */
8663  if (REG_P (incloc))
8664    reg_last_reload_reg[REGNO (incloc)] = 0;
8665
8666  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
8667    {
8668      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
8669      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
8670    }
8671  else
8672    {
8673      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8674	inc_amount = -inc_amount;
8675
8676      inc = GEN_INT (inc_amount);
8677    }
8678
8679  /* If this is post-increment, first copy the location to the reload reg.  */
8680  if (post && real_in != reloadreg)
8681    emit_insn (gen_move_insn (reloadreg, real_in));
8682
8683  if (in == value)
8684    {
8685      /* See if we can directly increment INCLOC.  Use a method similar to
8686	 that in gen_reload.  */
8687
8688      last = get_last_insn ();
8689      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8690					 gen_rtx_PLUS (GET_MODE (incloc),
8691						       incloc, inc)));
8692
8693      code = recog_memoized (add_insn);
8694      if (code >= 0)
8695	{
8696	  extract_insn (add_insn);
8697	  if (constrain_operands (1))
8698	    {
8699	      /* If this is a pre-increment and we have incremented the value
8700		 where it lives, copy the incremented value to RELOADREG to
8701		 be used as an address.  */
8702
8703	      if (! post)
8704		emit_insn (gen_move_insn (reloadreg, incloc));
8705	      return;
8706	    }
8707	}
8708      delete_insns_since (last);
8709    }
8710
8711  /* If couldn't do the increment directly, must increment in RELOADREG.
8712     The way we do this depends on whether this is pre- or post-increment.
8713     For pre-increment, copy INCLOC to the reload register, increment it
8714     there, then save back.  */
8715
8716  if (! post)
8717    {
8718      if (in != reloadreg)
8719	emit_insn (gen_move_insn (reloadreg, real_in));
8720      emit_insn (gen_add2_insn (reloadreg, inc));
8721      emit_insn (gen_move_insn (incloc, reloadreg));
8722    }
8723  else
8724    {
8725      /* Postincrement.
8726	 Because this might be a jump insn or a compare, and because RELOADREG
8727	 may not be available after the insn in an input reload, we must do
8728	 the incrementation before the insn being reloaded for.
8729
8730	 We have already copied IN to RELOADREG.  Increment the copy in
8731	 RELOADREG, save that back, then decrement RELOADREG so it has
8732	 the original value.  */
8733
8734      emit_insn (gen_add2_insn (reloadreg, inc));
8735      emit_insn (gen_move_insn (incloc, reloadreg));
8736      if (CONST_INT_P (inc))
8737	emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
8738      else
8739	emit_insn (gen_sub2_insn (reloadreg, inc));
8740    }
8741}
8742
8743#ifdef AUTO_INC_DEC
8744static void
8745add_auto_inc_notes (rtx insn, rtx x)
8746{
8747  enum rtx_code code = GET_CODE (x);
8748  const char *fmt;
8749  int i, j;
8750
8751  if (code == MEM && auto_inc_p (XEXP (x, 0)))
8752    {
8753      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
8754      return;
8755    }
8756
8757  /* Scan all the operand sub-expressions.  */
8758  fmt = GET_RTX_FORMAT (code);
8759  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8760    {
8761      if (fmt[i] == 'e')
8762	add_auto_inc_notes (insn, XEXP (x, i));
8763      else if (fmt[i] == 'E')
8764	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8765	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
8766    }
8767}
8768#endif
8769
8770/* This is used by reload pass, that does emit some instructions after
8771   abnormal calls moving basic block end, but in fact it wants to emit
8772   them on the edge.  Looks for abnormal call edges, find backward the
8773   proper call and fix the damage.
8774
8775   Similar handle instructions throwing exceptions internally.  */
8776void
8777fixup_abnormal_edges (void)
8778{
8779  bool inserted = false;
8780  basic_block bb;
8781
8782  FOR_EACH_BB (bb)
8783    {
8784      edge e;
8785      edge_iterator ei;
8786
8787      /* Look for cases we are interested in - calls or instructions causing
8788         exceptions.  */
8789      FOR_EACH_EDGE (e, ei, bb->succs)
8790	{
8791	  if (e->flags & EDGE_ABNORMAL_CALL)
8792	    break;
8793	  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8794	      == (EDGE_ABNORMAL | EDGE_EH))
8795	    break;
8796	}
8797      if (e && !CALL_P (BB_END (bb))
8798	  && !can_throw_internal (BB_END (bb)))
8799	{
8800	  rtx insn;
8801
8802	  /* Get past the new insns generated.  Allow notes, as the insns
8803	     may be already deleted.  */
8804	  insn = BB_END (bb);
8805	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
8806		 && !can_throw_internal (insn)
8807		 && insn != BB_HEAD (bb))
8808	    insn = PREV_INSN (insn);
8809
8810	  if (CALL_P (insn) || can_throw_internal (insn))
8811	    {
8812	      rtx stop, next;
8813
8814	      stop = NEXT_INSN (BB_END (bb));
8815	      BB_END (bb) = insn;
8816	      insn = NEXT_INSN (insn);
8817
8818	      FOR_EACH_EDGE (e, ei, bb->succs)
8819		if (e->flags & EDGE_FALLTHRU)
8820		  break;
8821
8822	      while (insn && insn != stop)
8823		{
8824		  next = NEXT_INSN (insn);
8825		  if (INSN_P (insn))
8826		    {
8827		      delete_insn (insn);
8828
8829		      /* Sometimes there's still the return value USE.
8830			 If it's placed after a trapping call (i.e. that
8831			 call is the last insn anyway), we have no fallthru
8832			 edge.  Simply delete this use and don't try to insert
8833			 on the non-existent edge.  */
8834		      if (GET_CODE (PATTERN (insn)) != USE)
8835			{
8836			  /* We're not deleting it, we're moving it.  */
8837			  INSN_DELETED_P (insn) = 0;
8838			  PREV_INSN (insn) = NULL_RTX;
8839			  NEXT_INSN (insn) = NULL_RTX;
8840
8841			  insert_insn_on_edge (insn, e);
8842			  inserted = true;
8843			}
8844		    }
8845		  else if (!BARRIER_P (insn))
8846		    set_block_for_insn (insn, NULL);
8847		  insn = next;
8848		}
8849	    }
8850
8851	  /* It may be that we don't find any such trapping insn.  In this
8852	     case we discovered quite late that the insn that had been
8853	     marked as can_throw_internal in fact couldn't trap at all.
8854	     So we should in fact delete the EH edges out of the block.  */
8855	  else
8856	    purge_dead_edges (bb);
8857	}
8858    }
8859
8860  /* We've possibly turned single trapping insn into multiple ones.  */
8861  if (flag_non_call_exceptions)
8862    {
8863      sbitmap blocks;
8864      blocks = sbitmap_alloc (last_basic_block);
8865      sbitmap_ones (blocks);
8866      find_many_sub_basic_blocks (blocks);
8867      sbitmap_free (blocks);
8868    }
8869
8870  if (inserted)
8871    commit_edge_insertions ();
8872
8873#ifdef ENABLE_CHECKING
8874  /* Verify that we didn't turn one trapping insn into many, and that
8875     we found and corrected all of the problems wrt fixups on the
8876     fallthru edge.  */
8877  verify_flow_info ();
8878#endif
8879}
8880