1/* Search an insn for pseudo regs that must be in hard regs and are not.
2   Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20/* This file contains subroutines used only from the file reload1.c.
21   It knows how to scan one insn for operands and values
22   that need to be copied into registers to make valid code.
23   It also finds other operands and values which are valid
24   but for which equivalent values in registers exist and
25   ought to be used instead.
26
27   Before processing the first insn of the function, call `init_reload'.
28   init_reload actually has to be called earlier anyway.
29
30   To scan an insn, call `find_reloads'.  This does two things:
31   1. sets up tables describing which values must be reloaded
32   for this insn, and what kind of hard regs they must be reloaded into;
33   2. optionally record the locations where those values appear in
34   the data, so they can be replaced properly later.
35   This is done only if the second arg to `find_reloads' is nonzero.
36
37   The third arg to `find_reloads' specifies the number of levels
38   of indirect addressing supported by the machine.  If it is zero,
39   indirect addressing is not valid.  If it is one, (MEM (REG n))
40   is valid even if (REG n) did not get a hard register; if it is two,
41   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42   hard register, and similarly for higher values.
43
44   Then you must choose the hard regs to reload those pseudo regs into,
45   and generate appropriate load insns before this insn and perhaps
46   also store insns after this insn.  Set up the array `reload_reg_rtx'
47   to contain the REG rtx's for the registers you used.  In some
48   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49   for certain reloads.  Then that tells you which register to use,
50   so you do not need to allocate one.  But you still do need to add extra
51   instructions to copy the value into and out of that register.
52
53   Finally you must call `subst_reloads' to substitute the reload reg rtx's
54   into the locations already recorded.
55
56NOTE SIDE EFFECTS:
57
58   find_reloads can alter the operands of the instruction it is called on.
59
60   1. Two operands of any sort may be interchanged, if they are in a
61   commutative instruction.
62   This happens only if find_reloads thinks the instruction will compile
63   better that way.
64
65   2. Pseudo-registers that are equivalent to constants are replaced
66   with those constants if they are not in hard registers.
67
681 happens every time find_reloads is called.
692 happens only when REPLACE is 1, which is only when
70actually doing the reloads, not when just counting them.
71
72Using a reload register for several reloads in one insn:
73
74When an insn has reloads, it is considered as having three parts:
75the input reloads, the insn itself after reloading, and the output reloads.
76Reloads of values used in memory addresses are often needed for only one part.
77
78When this is so, reload_when_needed records which part needs the reload.
79Two reloads for different parts of the insn can share the same reload
80register.
81
82When a reload is used for addresses in multiple parts, or when it is
83an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84a register with any other reload.  */
85
86#define REG_OK_STRICT
87
88/* We do not enable this with ENABLE_CHECKING, since it is awfully slow.  */
89#undef DEBUG_RELOAD
90
91#include "config.h"
92#include "system.h"
93#include "coretypes.h"
94#include "tm.h"
95#include "rtl-error.h"
96#include "tm_p.h"
97#include "insn-config.h"
98#include "symtab.h"
99#include "hashtab.h"
100#include "hash-set.h"
101#include "vec.h"
102#include "machmode.h"
103#include "hard-reg-set.h"
104#include "input.h"
105#include "function.h"
106#include "rtl.h"
107#include "flags.h"
108#include "statistics.h"
109#include "double-int.h"
110#include "real.h"
111#include "fixed-value.h"
112#include "alias.h"
113#include "wide-int.h"
114#include "inchash.h"
115#include "tree.h"
116#include "expmed.h"
117#include "dojump.h"
118#include "explow.h"
119#include "calls.h"
120#include "emit-rtl.h"
121#include "varasm.h"
122#include "stmt.h"
123#include "expr.h"
124#include "insn-codes.h"
125#include "optabs.h"
126#include "recog.h"
127#include "dominance.h"
128#include "cfg.h"
129#include "predict.h"
130#include "basic-block.h"
131#include "df.h"
132#include "reload.h"
133#include "regs.h"
134#include "addresses.h"
135#include "params.h"
136#include "target.h"
137#include "ira.h"
138
139/* True if X is a constant that can be forced into the constant pool.
140   MODE is the mode of the operand, or VOIDmode if not known.  */
141#define CONST_POOL_OK_P(MODE, X)		\
142  ((MODE) != VOIDmode				\
143   && CONSTANT_P (X)				\
144   && GET_CODE (X) != HIGH			\
145   && !targetm.cannot_force_const_mem (MODE, X))
146
147/* True if C is a non-empty register class that has too few registers
148   to be safely used as a reload target class.  */
149
150static inline bool
151small_register_class_p (reg_class_t rclass)
152{
153  return (reg_class_size [(int) rclass] == 1
154	  || (reg_class_size [(int) rclass] >= 1
155	      && targetm.class_likely_spilled_p (rclass)));
156}
157
158
159/* All reloads of the current insn are recorded here.  See reload.h for
160   comments.  */
161int n_reloads;
162struct reload rld[MAX_RELOADS];
163
164/* All the "earlyclobber" operands of the current insn
165   are recorded here.  */
166int n_earlyclobbers;
167rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
168
169int reload_n_operands;
170
171/* Replacing reloads.
172
173   If `replace_reloads' is nonzero, then as each reload is recorded
174   an entry is made for it in the table `replacements'.
175   Then later `subst_reloads' can look through that table and
176   perform all the replacements needed.  */
177
178/* Nonzero means record the places to replace.  */
179static int replace_reloads;
180
181/* Each replacement is recorded with a structure like this.  */
182struct replacement
183{
184  rtx *where;			/* Location to store in */
185  int what;			/* which reload this is for */
186  machine_mode mode;	/* mode it must have */
187};
188
189static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
190
191/* Number of replacements currently recorded.  */
192static int n_replacements;
193
194/* Used to track what is modified by an operand.  */
195struct decomposition
196{
197  int reg_flag;		/* Nonzero if referencing a register.  */
198  int safe;		/* Nonzero if this can't conflict with anything.  */
199  rtx base;		/* Base address for MEM.  */
200  HOST_WIDE_INT start;	/* Starting offset or register number.  */
201  HOST_WIDE_INT end;	/* Ending offset or register number.  */
202};
203
204#ifdef SECONDARY_MEMORY_NEEDED
205
206/* Save MEMs needed to copy from one class of registers to another.  One MEM
207   is used per mode, but normally only one or two modes are ever used.
208
209   We keep two versions, before and after register elimination.  The one
210   after register elimination is record separately for each operand.  This
211   is done in case the address is not valid to be sure that we separately
212   reload each.  */
213
214static rtx secondary_memlocs[NUM_MACHINE_MODES];
215static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
216static int secondary_memlocs_elim_used = 0;
217#endif
218
219/* The instruction we are doing reloads for;
220   so we can test whether a register dies in it.  */
221static rtx_insn *this_insn;
222
223/* Nonzero if this instruction is a user-specified asm with operands.  */
224static int this_insn_is_asm;
225
226/* If hard_regs_live_known is nonzero,
227   we can tell which hard regs are currently live,
228   at least enough to succeed in choosing dummy reloads.  */
229static int hard_regs_live_known;
230
231/* Indexed by hard reg number,
232   element is nonnegative if hard reg has been spilled.
233   This vector is passed to `find_reloads' as an argument
234   and is not changed here.  */
235static short *static_reload_reg_p;
236
237/* Set to 1 in subst_reg_equivs if it changes anything.  */
238static int subst_reg_equivs_changed;
239
240/* On return from push_reload, holds the reload-number for the OUT
241   operand, which can be different for that from the input operand.  */
242static int output_reloadnum;
243
244  /* Compare two RTX's.  */
245#define MATCHES(x, y) \
246 (x == y || (x != 0 && (REG_P (x)				\
247			? REG_P (y) && REGNO (x) == REGNO (y)	\
248			: rtx_equal_p (x, y) && ! side_effects_p (x))))
249
250  /* Indicates if two reloads purposes are for similar enough things that we
251     can merge their reloads.  */
252#define MERGABLE_RELOADS(when1, when2, op1, op2) \
253  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
254   || ((when1) == (when2) && (op1) == (op2))		\
255   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
256   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
257       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
258   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
259       && (when2) == RELOAD_FOR_OTHER_ADDRESS))
260
261  /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
262#define MERGE_TO_OTHER(when1, when2, op1, op2) \
263  ((when1) != (when2)					\
264   || ! ((op1) == (op2)					\
265	 || (when1) == RELOAD_FOR_INPUT			\
266	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
267	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
268
269  /* If we are going to reload an address, compute the reload type to
270     use.  */
271#define ADDR_TYPE(type)					\
272  ((type) == RELOAD_FOR_INPUT_ADDRESS			\
273   ? RELOAD_FOR_INPADDR_ADDRESS				\
274   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
275      ? RELOAD_FOR_OUTADDR_ADDRESS			\
276      : (type)))
277
278static int push_secondary_reload (int, rtx, int, int, enum reg_class,
279				  machine_mode, enum reload_type,
280				  enum insn_code *, secondary_reload_info *);
281static enum reg_class find_valid_class (machine_mode, machine_mode,
282					int, unsigned int);
283static void push_replacement (rtx *, int, machine_mode);
284static void dup_replacements (rtx *, rtx *);
285static void combine_reloads (void);
286static int find_reusable_reload (rtx *, rtx, enum reg_class,
287				 enum reload_type, int, int);
288static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
289			      machine_mode, reg_class_t, int, int);
290static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
291static struct decomposition decompose (rtx);
292static int immune_p (rtx, rtx, struct decomposition);
293static bool alternative_allows_const_pool_ref (rtx, const char *, int);
294static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
295				rtx_insn *, int *);
296static rtx make_memloc (rtx, int);
297static int maybe_memory_address_addr_space_p (machine_mode, rtx,
298					      addr_space_t, rtx *);
299static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
300				 int, enum reload_type, int, rtx_insn *);
301static rtx subst_reg_equivs (rtx, rtx_insn *);
302static rtx subst_indexed_address (rtx);
303static void update_auto_inc_notes (rtx_insn *, int, int);
304static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
305				   enum rtx_code, enum rtx_code, rtx *,
306				   int, enum reload_type,int, rtx_insn *);
307static void find_reloads_address_part (rtx, rtx *, enum reg_class,
308				       machine_mode, int,
309				       enum reload_type, int);
310static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
311					int, rtx_insn *, int *);
312static void copy_replacements_1 (rtx *, rtx *, int);
313static int find_inc_amount (rtx, rtx);
314static int refers_to_mem_for_reload_p (rtx);
315static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
316					 rtx, rtx *);
317
318/* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
319   list yet.  */
320
321static void
322push_reg_equiv_alt_mem (int regno, rtx mem)
323{
324  rtx it;
325
326  for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
327    if (rtx_equal_p (XEXP (it, 0), mem))
328      return;
329
330  reg_equiv_alt_mem_list (regno)
331    = alloc_EXPR_LIST (REG_EQUIV, mem,
332		       reg_equiv_alt_mem_list (regno));
333}
334
335/* Determine if any secondary reloads are needed for loading (if IN_P is
336   nonzero) or storing (if IN_P is zero) X to or from a reload register of
337   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
338   are needed, push them.
339
340   Return the reload number of the secondary reload we made, or -1 if
341   we didn't need one.  *PICODE is set to the insn_code to use if we do
342   need a secondary reload.  */
343
344static int
345push_secondary_reload (int in_p, rtx x, int opnum, int optional,
346		       enum reg_class reload_class,
347		       machine_mode reload_mode, enum reload_type type,
348		       enum insn_code *picode, secondary_reload_info *prev_sri)
349{
350  enum reg_class rclass = NO_REGS;
351  enum reg_class scratch_class;
352  machine_mode mode = reload_mode;
353  enum insn_code icode = CODE_FOR_nothing;
354  enum insn_code t_icode = CODE_FOR_nothing;
355  enum reload_type secondary_type;
356  int s_reload, t_reload = -1;
357  const char *scratch_constraint;
358  secondary_reload_info sri;
359
360  if (type == RELOAD_FOR_INPUT_ADDRESS
361      || type == RELOAD_FOR_OUTPUT_ADDRESS
362      || type == RELOAD_FOR_INPADDR_ADDRESS
363      || type == RELOAD_FOR_OUTADDR_ADDRESS)
364    secondary_type = type;
365  else
366    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
367
368  *picode = CODE_FOR_nothing;
369
370  /* If X is a paradoxical SUBREG, use the inner value to determine both the
371     mode and object being reloaded.  */
372  if (paradoxical_subreg_p (x))
373    {
374      x = SUBREG_REG (x);
375      reload_mode = GET_MODE (x);
376    }
377
378  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
379     is still a pseudo-register by now, it *must* have an equivalent MEM
380     but we don't want to assume that), use that equivalent when seeing if
381     a secondary reload is needed since whether or not a reload is needed
382     might be sensitive to the form of the MEM.  */
383
384  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
385      && reg_equiv_mem (REGNO (x)))
386    x = reg_equiv_mem (REGNO (x));
387
388  sri.icode = CODE_FOR_nothing;
389  sri.prev_sri = prev_sri;
390  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
391						      reload_mode, &sri);
392  icode = (enum insn_code) sri.icode;
393
394  /* If we don't need any secondary registers, done.  */
395  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
396    return -1;
397
398  if (rclass != NO_REGS)
399    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
400				      reload_mode, type, &t_icode, &sri);
401
402  /* If we will be using an insn, the secondary reload is for a
403     scratch register.  */
404
405  if (icode != CODE_FOR_nothing)
406    {
407      /* If IN_P is nonzero, the reload register will be the output in
408	 operand 0.  If IN_P is zero, the reload register will be the input
409	 in operand 1.  Outputs should have an initial "=", which we must
410	 skip.  */
411
412      /* ??? It would be useful to be able to handle only two, or more than
413	 three, operands, but for now we can only handle the case of having
414	 exactly three: output, input and one temp/scratch.  */
415      gcc_assert (insn_data[(int) icode].n_operands == 3);
416
417      /* ??? We currently have no way to represent a reload that needs
418	 an icode to reload from an intermediate tertiary reload register.
419	 We should probably have a new field in struct reload to tag a
420	 chain of scratch operand reloads onto.   */
421      gcc_assert (rclass == NO_REGS);
422
423      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
424      gcc_assert (*scratch_constraint == '=');
425      scratch_constraint++;
426      if (*scratch_constraint == '&')
427	scratch_constraint++;
428      scratch_class = (reg_class_for_constraint
429		       (lookup_constraint (scratch_constraint)));
430
431      rclass = scratch_class;
432      mode = insn_data[(int) icode].operand[2].mode;
433    }
434
435  /* This case isn't valid, so fail.  Reload is allowed to use the same
436     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
437     in the case of a secondary register, we actually need two different
438     registers for correct code.  We fail here to prevent the possibility of
439     silently generating incorrect code later.
440
441     The convention is that secondary input reloads are valid only if the
442     secondary_class is different from class.  If you have such a case, you
443     can not use secondary reloads, you must work around the problem some
444     other way.
445
446     Allow this when a reload_in/out pattern is being used.  I.e. assume
447     that the generated code handles this case.  */
448
449  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
450	      || t_icode != CODE_FOR_nothing);
451
452  /* See if we can reuse an existing secondary reload.  */
453  for (s_reload = 0; s_reload < n_reloads; s_reload++)
454    if (rld[s_reload].secondary_p
455	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
456	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
457	&& ((in_p && rld[s_reload].inmode == mode)
458	    || (! in_p && rld[s_reload].outmode == mode))
459	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
460	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
461	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
462	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
463	&& (small_register_class_p (rclass)
464	    || targetm.small_register_classes_for_mode_p (VOIDmode))
465	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
466			     opnum, rld[s_reload].opnum))
467      {
468	if (in_p)
469	  rld[s_reload].inmode = mode;
470	if (! in_p)
471	  rld[s_reload].outmode = mode;
472
473	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
474	  rld[s_reload].rclass = rclass;
475
476	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
477	rld[s_reload].optional &= optional;
478	rld[s_reload].secondary_p = 1;
479	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
480			    opnum, rld[s_reload].opnum))
481	  rld[s_reload].when_needed = RELOAD_OTHER;
482
483	break;
484      }
485
486  if (s_reload == n_reloads)
487    {
488#ifdef SECONDARY_MEMORY_NEEDED
489      /* If we need a memory location to copy between the two reload regs,
490	 set it up now.  Note that we do the input case before making
491	 the reload and the output case after.  This is due to the
492	 way reloads are output.  */
493
494      if (in_p && icode == CODE_FOR_nothing
495	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
496	{
497	  get_secondary_mem (x, reload_mode, opnum, type);
498
499	  /* We may have just added new reloads.  Make sure we add
500	     the new reload at the end.  */
501	  s_reload = n_reloads;
502	}
503#endif
504
505      /* We need to make a new secondary reload for this register class.  */
506      rld[s_reload].in = rld[s_reload].out = 0;
507      rld[s_reload].rclass = rclass;
508
509      rld[s_reload].inmode = in_p ? mode : VOIDmode;
510      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
511      rld[s_reload].reg_rtx = 0;
512      rld[s_reload].optional = optional;
513      rld[s_reload].inc = 0;
514      /* Maybe we could combine these, but it seems too tricky.  */
515      rld[s_reload].nocombine = 1;
516      rld[s_reload].in_reg = 0;
517      rld[s_reload].out_reg = 0;
518      rld[s_reload].opnum = opnum;
519      rld[s_reload].when_needed = secondary_type;
520      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
521      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
522      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
523      rld[s_reload].secondary_out_icode
524	= ! in_p ? t_icode : CODE_FOR_nothing;
525      rld[s_reload].secondary_p = 1;
526
527      n_reloads++;
528
529#ifdef SECONDARY_MEMORY_NEEDED
530      if (! in_p && icode == CODE_FOR_nothing
531	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
532	get_secondary_mem (x, mode, opnum, type);
533#endif
534    }
535
536  *picode = icode;
537  return s_reload;
538}
539
540/* If a secondary reload is needed, return its class.  If both an intermediate
541   register and a scratch register is needed, we return the class of the
542   intermediate register.  */
543reg_class_t
544secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
545			rtx x)
546{
547  enum insn_code icode;
548  secondary_reload_info sri;
549
550  sri.icode = CODE_FOR_nothing;
551  sri.prev_sri = NULL;
552  rclass
553    = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
554  icode = (enum insn_code) sri.icode;
555
556  /* If there are no secondary reloads at all, we return NO_REGS.
557     If an intermediate register is needed, we return its class.  */
558  if (icode == CODE_FOR_nothing || rclass != NO_REGS)
559    return rclass;
560
561  /* No intermediate register is needed, but we have a special reload
562     pattern, which we assume for now needs a scratch register.  */
563  return scratch_reload_class (icode);
564}
565
566/* ICODE is the insn_code of a reload pattern.  Check that it has exactly
567   three operands, verify that operand 2 is an output operand, and return
568   its register class.
569   ??? We'd like to be able to handle any pattern with at least 2 operands,
570   for zero or more scratch registers, but that needs more infrastructure.  */
571enum reg_class
572scratch_reload_class (enum insn_code icode)
573{
574  const char *scratch_constraint;
575  enum reg_class rclass;
576
577  gcc_assert (insn_data[(int) icode].n_operands == 3);
578  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
579  gcc_assert (*scratch_constraint == '=');
580  scratch_constraint++;
581  if (*scratch_constraint == '&')
582    scratch_constraint++;
583  rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
584  gcc_assert (rclass != NO_REGS);
585  return rclass;
586}
587
588#ifdef SECONDARY_MEMORY_NEEDED
589
590/* Return a memory location that will be used to copy X in mode MODE.
591   If we haven't already made a location for this mode in this insn,
592   call find_reloads_address on the location being returned.  */
593
594rtx
595get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
596		   int opnum, enum reload_type type)
597{
598  rtx loc;
599  int mem_valid;
600
601  /* By default, if MODE is narrower than a word, widen it to a word.
602     This is required because most machines that require these memory
603     locations do not support short load and stores from all registers
604     (e.g., FP registers).  */
605
606#ifdef SECONDARY_MEMORY_NEEDED_MODE
607  mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
608#else
609  if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
610    mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
611#endif
612
613  /* If we already have made a MEM for this operand in MODE, return it.  */
614  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
615    return secondary_memlocs_elim[(int) mode][opnum];
616
617  /* If this is the first time we've tried to get a MEM for this mode,
618     allocate a new one.  `something_changed' in reload will get set
619     by noticing that the frame size has changed.  */
620
621  if (secondary_memlocs[(int) mode] == 0)
622    {
623#ifdef SECONDARY_MEMORY_NEEDED_RTX
624      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
625#else
626      secondary_memlocs[(int) mode]
627	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
628#endif
629    }
630
631  /* Get a version of the address doing any eliminations needed.  If that
632     didn't give us a new MEM, make a new one if it isn't valid.  */
633
634  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
635  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
636						  MEM_ADDR_SPACE (loc));
637
638  if (! mem_valid && loc == secondary_memlocs[(int) mode])
639    loc = copy_rtx (loc);
640
641  /* The only time the call below will do anything is if the stack
642     offset is too large.  In that case IND_LEVELS doesn't matter, so we
643     can just pass a zero.  Adjust the type to be the address of the
644     corresponding object.  If the address was valid, save the eliminated
645     address.  If it wasn't valid, we need to make a reload each time, so
646     don't save it.  */
647
648  if (! mem_valid)
649    {
650      type =  (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
651	       : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
652	       : RELOAD_OTHER);
653
654      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
655			    opnum, type, 0, 0);
656    }
657
658  secondary_memlocs_elim[(int) mode][opnum] = loc;
659  if (secondary_memlocs_elim_used <= (int)mode)
660    secondary_memlocs_elim_used = (int)mode + 1;
661  return loc;
662}
663
664/* Clear any secondary memory locations we've made.  */
665
666void
667clear_secondary_mem (void)
668{
669  memset (secondary_memlocs, 0, sizeof secondary_memlocs);
670}
671#endif /* SECONDARY_MEMORY_NEEDED */
672
673
674/* Find the largest class which has at least one register valid in
675   mode INNER, and which for every such register, that register number
676   plus N is also valid in OUTER (if in range) and is cheap to move
677   into REGNO.  Such a class must exist.  */
678
679static enum reg_class
680find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
681		  machine_mode inner ATTRIBUTE_UNUSED, int n,
682		  unsigned int dest_regno ATTRIBUTE_UNUSED)
683{
684  int best_cost = -1;
685  int rclass;
686  int regno;
687  enum reg_class best_class = NO_REGS;
688  enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
689  unsigned int best_size = 0;
690  int cost;
691
692  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
693    {
694      int bad = 0;
695      int good = 0;
696      for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
697	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
698	  {
699	    if (HARD_REGNO_MODE_OK (regno, inner))
700	      {
701		good = 1;
702		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
703		    && ! HARD_REGNO_MODE_OK (regno + n, outer))
704		  bad = 1;
705	      }
706	  }
707
708      if (bad || !good)
709	continue;
710      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
711
712      if ((reg_class_size[rclass] > best_size
713	   && (best_cost < 0 || best_cost >= cost))
714	  || best_cost > cost)
715	{
716	  best_class = (enum reg_class) rclass;
717	  best_size = reg_class_size[rclass];
718	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
719					  dest_class);
720	}
721    }
722
723  gcc_assert (best_size != 0);
724
725  return best_class;
726}
727
728/* We are trying to reload a subreg of something that is not a register.
729   Find the largest class which contains only registers valid in
730   mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
731   which we would eventually like to obtain the object.  */
732
733static enum reg_class
734find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
735		    machine_mode mode ATTRIBUTE_UNUSED,
736		    enum reg_class dest_class ATTRIBUTE_UNUSED)
737{
738  int best_cost = -1;
739  int rclass;
740  int regno;
741  enum reg_class best_class = NO_REGS;
742  unsigned int best_size = 0;
743  int cost;
744
745  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
746    {
747      int bad = 0;
748      for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
749	{
750	  if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
751	      && !HARD_REGNO_MODE_OK (regno, mode))
752	    bad = 1;
753	}
754
755      if (bad)
756	continue;
757
758      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
759
760      if ((reg_class_size[rclass] > best_size
761	   && (best_cost < 0 || best_cost >= cost))
762	  || best_cost > cost)
763	{
764	  best_class = (enum reg_class) rclass;
765	  best_size = reg_class_size[rclass];
766	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
767					  dest_class);
768	}
769    }
770
771  gcc_assert (best_size != 0);
772
773#ifdef LIMIT_RELOAD_CLASS
774  best_class = LIMIT_RELOAD_CLASS (mode, best_class);
775#endif
776  return best_class;
777}
778
779/* Return the number of a previously made reload that can be combined with
780   a new one, or n_reloads if none of the existing reloads can be used.
781   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
782   push_reload, they determine the kind of the new reload that we try to
783   combine.  P_IN points to the corresponding value of IN, which can be
784   modified by this function.
785   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */
786
787static int
788find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
789		      enum reload_type type, int opnum, int dont_share)
790{
791  rtx in = *p_in;
792  int i;
793  /* We can't merge two reloads if the output of either one is
794     earlyclobbered.  */
795
796  if (earlyclobber_operand_p (out))
797    return n_reloads;
798
799  /* We can use an existing reload if the class is right
800     and at least one of IN and OUT is a match
801     and the other is at worst neutral.
802     (A zero compared against anything is neutral.)
803
804     For targets with small register classes, don't use existing reloads
805     unless they are for the same thing since that can cause us to need
806     more reload registers than we otherwise would.  */
807
808  for (i = 0; i < n_reloads; i++)
809    if ((reg_class_subset_p (rclass, rld[i].rclass)
810	 || reg_class_subset_p (rld[i].rclass, rclass))
811	/* If the existing reload has a register, it must fit our class.  */
812	&& (rld[i].reg_rtx == 0
813	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
814				  true_regnum (rld[i].reg_rtx)))
815	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
816	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
817	    || (out != 0 && MATCHES (rld[i].out, out)
818		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
819	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
820	&& (small_register_class_p (rclass)
821	    || targetm.small_register_classes_for_mode_p (VOIDmode))
822	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
823      return i;
824
825  /* Reloading a plain reg for input can match a reload to postincrement
826     that reg, since the postincrement's value is the right value.
827     Likewise, it can match a preincrement reload, since we regard
828     the preincrementation as happening before any ref in this insn
829     to that register.  */
830  for (i = 0; i < n_reloads; i++)
831    if ((reg_class_subset_p (rclass, rld[i].rclass)
832	 || reg_class_subset_p (rld[i].rclass, rclass))
833	/* If the existing reload has a register, it must fit our
834	   class.  */
835	&& (rld[i].reg_rtx == 0
836	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
837				  true_regnum (rld[i].reg_rtx)))
838	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
839	&& ((REG_P (in)
840	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
841	     && MATCHES (XEXP (rld[i].in, 0), in))
842	    || (REG_P (rld[i].in)
843		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
844		&& MATCHES (XEXP (in, 0), rld[i].in)))
845	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
846	&& (small_register_class_p (rclass)
847	    || targetm.small_register_classes_for_mode_p (VOIDmode))
848	&& MERGABLE_RELOADS (type, rld[i].when_needed,
849			     opnum, rld[i].opnum))
850      {
851	/* Make sure reload_in ultimately has the increment,
852	   not the plain register.  */
853	if (REG_P (in))
854	  *p_in = rld[i].in;
855	return i;
856      }
857  return n_reloads;
858}
859
860/* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
861   expression.  MODE is the mode that X will be used in.  OUTPUT is true if
862   the function is invoked for the output part of an enclosing reload.  */
863
864static bool
865reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
866{
867  rtx inner;
868
869  /* Only SUBREGs are problematical.  */
870  if (GET_CODE (x) != SUBREG)
871    return false;
872
873  inner = SUBREG_REG (x);
874
875  /* If INNER is a constant or PLUS, then INNER will need reloading.  */
876  if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
877    return true;
878
879  /* If INNER is not a hard register, then INNER will not need reloading.  */
880  if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
881    return false;
882
883  /* If INNER is not ok for MODE, then INNER will need reloading.  */
884  if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
885    return true;
886
887  /* If this is for an output, and the outer part is a word or smaller,
888     INNER is larger than a word and the number of registers in INNER is
889     not the same as the number of words in INNER, then INNER will need
890     reloading (with an in-out reload).  */
891  return (output
892	  && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
893	  && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
894	  && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
895	      != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
896}
897
898/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
899   requiring an extra reload register.  The caller has already found that
900   IN contains some reference to REGNO, so check that we can produce the
901   new value in a single step.  E.g. if we have
902   (set (reg r13) (plus (reg r13) (const int 1))), and there is an
903   instruction that adds one to a register, this should succeed.
904   However, if we have something like
905   (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
906   needs to be loaded into a register first, we need a separate reload
907   register.
908   Such PLUS reloads are generated by find_reload_address_part.
909   The out-of-range PLUS expressions are usually introduced in the instruction
910   patterns by register elimination and substituting pseudos without a home
911   by their function-invariant equivalences.  */
912static int
913can_reload_into (rtx in, int regno, machine_mode mode)
914{
915  rtx dst;
916  rtx_insn *test_insn;
917  int r = 0;
918  struct recog_data_d save_recog_data;
919
920  /* For matching constraints, we often get notional input reloads where
921     we want to use the original register as the reload register.  I.e.
922     technically this is a non-optional input-output reload, but IN is
923     already a valid register, and has been chosen as the reload register.
924     Speed this up, since it trivially works.  */
925  if (REG_P (in))
926    return 1;
927
928  /* To test MEMs properly, we'd have to take into account all the reloads
929     that are already scheduled, which can become quite complicated.
930     And since we've already handled address reloads for this MEM, it
931     should always succeed anyway.  */
932  if (MEM_P (in))
933    return 1;
934
935  /* If we can make a simple SET insn that does the job, everything should
936     be fine.  */
937  dst =  gen_rtx_REG (mode, regno);
938  test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
939  save_recog_data = recog_data;
940  if (recog_memoized (test_insn) >= 0)
941    {
942      extract_insn (test_insn);
943      r = constrain_operands (1, get_enabled_alternatives (test_insn));
944    }
945  recog_data = save_recog_data;
946  return r;
947}
948
949/* Record one reload that needs to be performed.
950   IN is an rtx saying where the data are to be found before this instruction.
951   OUT says where they must be stored after the instruction.
952   (IN is zero for data not read, and OUT is zero for data not written.)
953   INLOC and OUTLOC point to the places in the instructions where
954   IN and OUT were found.
955   If IN and OUT are both nonzero, it means the same register must be used
956   to reload both IN and OUT.
957
958   RCLASS is a register class required for the reloaded data.
959   INMODE is the machine mode that the instruction requires
960   for the reg that replaces IN and OUTMODE is likewise for OUT.
961
962   If IN is zero, then OUT's location and mode should be passed as
963   INLOC and INMODE.
964
965   STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx.
966
967   OPTIONAL nonzero means this reload does not need to be performed:
968   it can be discarded if that is more convenient.
969
970   OPNUM and TYPE say what the purpose of this reload is.
971
972   The return value is the reload-number for this reload.
973
974   If both IN and OUT are nonzero, in some rare cases we might
975   want to make two separate reloads.  (Actually we never do this now.)
976   Therefore, the reload-number for OUT is stored in
977   output_reloadnum when we return; the return value applies to IN.
978   Usually (presently always), when IN and OUT are nonzero,
979   the two reload-numbers are equal, but the caller should be careful to
980   distinguish them.  */
981
982int
983push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
984	     enum reg_class rclass, machine_mode inmode,
985	     machine_mode outmode, int strict_low, int optional,
986	     int opnum, enum reload_type type)
987{
988  int i;
989  int dont_share = 0;
990  int dont_remove_subreg = 0;
991#ifdef LIMIT_RELOAD_CLASS
992  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
993#endif
994  int secondary_in_reload = -1, secondary_out_reload = -1;
995  enum insn_code secondary_in_icode = CODE_FOR_nothing;
996  enum insn_code secondary_out_icode = CODE_FOR_nothing;
997  enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
998  subreg_in_class = NO_REGS;
999
1000  /* INMODE and/or OUTMODE could be VOIDmode if no mode
1001     has been specified for the operand.  In that case,
1002     use the operand's mode as the mode to reload.  */
1003  if (inmode == VOIDmode && in != 0)
1004    inmode = GET_MODE (in);
1005  if (outmode == VOIDmode && out != 0)
1006    outmode = GET_MODE (out);
1007
1008  /* If find_reloads and friends until now missed to replace a pseudo
1009     with a constant of reg_equiv_constant something went wrong
1010     beforehand.
1011     Note that it can't simply be done here if we missed it earlier
1012     since the constant might need to be pushed into the literal pool
1013     and the resulting memref would probably need further
1014     reloading.  */
1015  if (in != 0 && REG_P (in))
1016    {
1017      int regno = REGNO (in);
1018
1019      gcc_assert (regno < FIRST_PSEUDO_REGISTER
1020		  || reg_renumber[regno] >= 0
1021		  || reg_equiv_constant (regno) == NULL_RTX);
1022    }
1023
1024  /* reg_equiv_constant only contains constants which are obviously
1025     not appropriate as destination.  So if we would need to replace
1026     the destination pseudo with a constant we are in real
1027     trouble.  */
1028  if (out != 0 && REG_P (out))
1029    {
1030      int regno = REGNO (out);
1031
1032      gcc_assert (regno < FIRST_PSEUDO_REGISTER
1033		  || reg_renumber[regno] >= 0
1034		  || reg_equiv_constant (regno) == NULL_RTX);
1035    }
1036
1037  /* If we have a read-write operand with an address side-effect,
1038     change either IN or OUT so the side-effect happens only once.  */
1039  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1040    switch (GET_CODE (XEXP (in, 0)))
1041      {
1042      case POST_INC: case POST_DEC:   case POST_MODIFY:
1043	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1044	break;
1045
1046      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1047	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1048	break;
1049
1050      default:
1051	break;
1052      }
1053
1054  /* If we are reloading a (SUBREG constant ...), really reload just the
1055     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
1056     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1057     a pseudo and hence will become a MEM) with M1 wider than M2 and the
1058     register is a pseudo, also reload the inside expression.
1059     For machines that extend byte loads, do this for any SUBREG of a pseudo
1060     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1061     M2 is an integral mode that gets extended when loaded.
1062     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1063     where either M1 is not valid for R or M2 is wider than a word but we
1064     only need one register to store an M2-sized quantity in R.
1065     (However, if OUT is nonzero, we need to reload the reg *and*
1066     the subreg, so do nothing here, and let following statement handle it.)
1067
1068     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1069     we can't handle it here because CONST_INT does not indicate a mode.
1070
1071     Similarly, we must reload the inside expression if we have a
1072     STRICT_LOW_PART (presumably, in == out in this case).
1073
1074     Also reload the inner expression if it does not require a secondary
1075     reload but the SUBREG does.
1076
1077     Finally, reload the inner expression if it is a register that is in
1078     the class whose registers cannot be referenced in a different size
1079     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
1080     cannot reload just the inside since we might end up with the wrong
1081     register class.  But if it is inside a STRICT_LOW_PART, we have
1082     no choice, so we hope we do get the right register class there.  */
1083
1084  if (in != 0 && GET_CODE (in) == SUBREG
1085      && (subreg_lowpart_p (in) || strict_low)
1086#ifdef CANNOT_CHANGE_MODE_CLASS
1087      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1088#endif
1089      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1090      && (CONSTANT_P (SUBREG_REG (in))
1091	  || GET_CODE (SUBREG_REG (in)) == PLUS
1092	  || strict_low
1093	  || (((REG_P (SUBREG_REG (in))
1094		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1095	       || MEM_P (SUBREG_REG (in)))
1096	      && ((GET_MODE_PRECISION (inmode)
1097		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1098#ifdef LOAD_EXTEND_OP
1099		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1100		      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1101			  <= UNITS_PER_WORD)
1102		      && (GET_MODE_PRECISION (inmode)
1103			  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1104		      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1105		      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1106#endif
1107#ifdef WORD_REGISTER_OPERATIONS
1108		  || ((GET_MODE_PRECISION (inmode)
1109		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1110		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1111			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1112			   / UNITS_PER_WORD)))
1113#endif
1114		  ))
1115	  || (REG_P (SUBREG_REG (in))
1116	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1117	      /* The case where out is nonzero
1118		 is handled differently in the following statement.  */
1119	      && (out == 0 || subreg_lowpart_p (in))
1120	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1121		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1122		       > UNITS_PER_WORD)
1123		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1124			/ UNITS_PER_WORD)
1125		       != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1126						[GET_MODE (SUBREG_REG (in))]))
1127		  || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1128	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1129	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1130					  SUBREG_REG (in))
1131		  == NO_REGS))
1132#ifdef CANNOT_CHANGE_MODE_CLASS
1133	  || (REG_P (SUBREG_REG (in))
1134	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1135	      && REG_CANNOT_CHANGE_MODE_P
1136	      (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1137#endif
1138	  ))
1139    {
1140#ifdef LIMIT_RELOAD_CLASS
1141      in_subreg_loc = inloc;
1142#endif
1143      inloc = &SUBREG_REG (in);
1144      in = *inloc;
1145#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1146      if (MEM_P (in))
1147	/* This is supposed to happen only for paradoxical subregs made by
1148	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
1149	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1150#endif
1151      inmode = GET_MODE (in);
1152    }
1153
1154  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1155     where M1 is not valid for R if it was not handled by the code above.
1156
1157     Similar issue for (SUBREG constant ...) if it was not handled by the
1158     code above.  This can happen if SUBREG_BYTE != 0.
1159
1160     However, we must reload the inner reg *as well as* the subreg in
1161     that case.  */
1162
1163  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1164    {
1165      if (REG_P (SUBREG_REG (in)))
1166	subreg_in_class
1167	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1168			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
1169						   GET_MODE (SUBREG_REG (in)),
1170						   SUBREG_BYTE (in),
1171						   GET_MODE (in)),
1172			      REGNO (SUBREG_REG (in)));
1173      else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1174	subreg_in_class = find_valid_class_1 (inmode,
1175					      GET_MODE (SUBREG_REG (in)),
1176					      rclass);
1177
1178      /* This relies on the fact that emit_reload_insns outputs the
1179	 instructions for input reloads of type RELOAD_OTHER in the same
1180	 order as the reloads.  Thus if the outer reload is also of type
1181	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1182	 output before the outer reload.  */
1183      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1184		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1185      dont_remove_subreg = 1;
1186    }
1187
1188  /* Similarly for paradoxical and problematical SUBREGs on the output.
1189     Note that there is no reason we need worry about the previous value
1190     of SUBREG_REG (out); even if wider than out, storing in a subreg is
1191     entitled to clobber it all (except in the case of a word mode subreg
1192     or of a STRICT_LOW_PART, in that latter case the constraint should
1193     label it input-output.)  */
1194  if (out != 0 && GET_CODE (out) == SUBREG
1195      && (subreg_lowpart_p (out) || strict_low)
1196#ifdef CANNOT_CHANGE_MODE_CLASS
1197      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1198#endif
1199      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1200      && (CONSTANT_P (SUBREG_REG (out))
1201	  || strict_low
1202	  || (((REG_P (SUBREG_REG (out))
1203		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1204	       || MEM_P (SUBREG_REG (out)))
1205	      && ((GET_MODE_PRECISION (outmode)
1206		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1207#ifdef WORD_REGISTER_OPERATIONS
1208		  || ((GET_MODE_PRECISION (outmode)
1209		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1210		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1211			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1212			   / UNITS_PER_WORD)))
1213#endif
1214		  ))
1215	  || (REG_P (SUBREG_REG (out))
1216	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1217	      /* The case of a word mode subreg
1218		 is handled differently in the following statement.  */
1219	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1220		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1221		        > UNITS_PER_WORD))
1222	      && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1223	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1224	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1225					  SUBREG_REG (out))
1226		  == NO_REGS))
1227#ifdef CANNOT_CHANGE_MODE_CLASS
1228	  || (REG_P (SUBREG_REG (out))
1229	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1230	      && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1231					   GET_MODE (SUBREG_REG (out)),
1232					   outmode))
1233#endif
1234	  ))
1235    {
1236#ifdef LIMIT_RELOAD_CLASS
1237      out_subreg_loc = outloc;
1238#endif
1239      outloc = &SUBREG_REG (out);
1240      out = *outloc;
1241#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1242      gcc_assert (!MEM_P (out)
1243		  || GET_MODE_SIZE (GET_MODE (out))
1244		     <= GET_MODE_SIZE (outmode));
1245#endif
1246      outmode = GET_MODE (out);
1247    }
1248
1249  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1250     where either M1 is not valid for R or M2 is wider than a word but we
1251     only need one register to store an M2-sized quantity in R.
1252
1253     However, we must reload the inner reg *as well as* the subreg in
1254     that case and the inner reg is an in-out reload.  */
1255
1256  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1257    {
1258      enum reg_class in_out_class
1259	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1260			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
1261						 GET_MODE (SUBREG_REG (out)),
1262						 SUBREG_BYTE (out),
1263						 GET_MODE (out)),
1264			    REGNO (SUBREG_REG (out)));
1265
1266      /* This relies on the fact that emit_reload_insns outputs the
1267	 instructions for output reloads of type RELOAD_OTHER in reverse
1268	 order of the reloads.  Thus if the outer reload is also of type
1269	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1270	 output after the outer reload.  */
1271      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1272		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1273		   0, 0, opnum, RELOAD_OTHER);
1274      dont_remove_subreg = 1;
1275    }
1276
1277  /* If IN appears in OUT, we can't share any input-only reload for IN.  */
1278  if (in != 0 && out != 0 && MEM_P (out)
1279      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1280      && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1281    dont_share = 1;
1282
1283  /* If IN is a SUBREG of a hard register, make a new REG.  This
1284     simplifies some of the cases below.  */
1285
1286  if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1287      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1288      && ! dont_remove_subreg)
1289    in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1290
1291  /* Similarly for OUT.  */
1292  if (out != 0 && GET_CODE (out) == SUBREG
1293      && REG_P (SUBREG_REG (out))
1294      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1295      && ! dont_remove_subreg)
1296    out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1297
1298  /* Narrow down the class of register wanted if that is
1299     desirable on this machine for efficiency.  */
1300  {
1301    reg_class_t preferred_class = rclass;
1302
1303    if (in != 0)
1304      preferred_class = targetm.preferred_reload_class (in, rclass);
1305
1306    /* Output reloads may need analogous treatment, different in detail.  */
1307    if (out != 0)
1308      preferred_class
1309	= targetm.preferred_output_reload_class (out, preferred_class);
1310
1311    /* Discard what the target said if we cannot do it.  */
1312    if (preferred_class != NO_REGS
1313	|| (optional && type == RELOAD_FOR_OUTPUT))
1314      rclass = (enum reg_class) preferred_class;
1315  }
1316
1317  /* Make sure we use a class that can handle the actual pseudo
1318     inside any subreg.  For example, on the 386, QImode regs
1319     can appear within SImode subregs.  Although GENERAL_REGS
1320     can handle SImode, QImode needs a smaller class.  */
1321#ifdef LIMIT_RELOAD_CLASS
1322  if (in_subreg_loc)
1323    rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1324  else if (in != 0 && GET_CODE (in) == SUBREG)
1325    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1326
1327  if (out_subreg_loc)
1328    rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1329  if (out != 0 && GET_CODE (out) == SUBREG)
1330    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1331#endif
1332
1333  /* Verify that this class is at least possible for the mode that
1334     is specified.  */
1335  if (this_insn_is_asm)
1336    {
1337      machine_mode mode;
1338      if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1339	mode = inmode;
1340      else
1341	mode = outmode;
1342      if (mode == VOIDmode)
1343	{
1344	  error_for_asm (this_insn, "cannot reload integer constant "
1345			 "operand in %<asm%>");
1346	  mode = word_mode;
1347	  if (in != 0)
1348	    inmode = word_mode;
1349	  if (out != 0)
1350	    outmode = word_mode;
1351	}
1352      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1353	if (HARD_REGNO_MODE_OK (i, mode)
1354	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1355	  break;
1356      if (i == FIRST_PSEUDO_REGISTER)
1357	{
1358	  error_for_asm (this_insn, "impossible register constraint "
1359			 "in %<asm%>");
1360	  /* Avoid further trouble with this insn.  */
1361	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1362	  /* We used to continue here setting class to ALL_REGS, but it triggers
1363	     sanity check on i386 for:
1364	     void foo(long double d)
1365	     {
1366	       asm("" :: "a" (d));
1367	     }
1368	     Returning zero here ought to be safe as we take care in
1369	     find_reloads to not process the reloads when instruction was
1370	     replaced by USE.  */
1371
1372	  return 0;
1373	}
1374    }
1375
1376  /* Optional output reloads are always OK even if we have no register class,
1377     since the function of these reloads is only to have spill_reg_store etc.
1378     set, so that the storing insn can be deleted later.  */
1379  gcc_assert (rclass != NO_REGS
1380	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1381
1382  i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1383
1384  if (i == n_reloads)
1385    {
1386      /* See if we need a secondary reload register to move between CLASS
1387	 and IN or CLASS and OUT.  Get the icode and push any required reloads
1388	 needed for each of them if so.  */
1389
1390      if (in != 0)
1391	secondary_in_reload
1392	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1393				   &secondary_in_icode, NULL);
1394      if (out != 0 && GET_CODE (out) != SCRATCH)
1395	secondary_out_reload
1396	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1397				   type, &secondary_out_icode, NULL);
1398
1399      /* We found no existing reload suitable for re-use.
1400	 So add an additional reload.  */
1401
1402#ifdef SECONDARY_MEMORY_NEEDED
1403      if (subreg_in_class == NO_REGS
1404	  && in != 0
1405	  && (REG_P (in)
1406	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1407	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1408	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1409      /* If a memory location is needed for the copy, make one.  */
1410      if (subreg_in_class != NO_REGS
1411	  && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1412	get_secondary_mem (in, inmode, opnum, type);
1413#endif
1414
1415      i = n_reloads;
1416      rld[i].in = in;
1417      rld[i].out = out;
1418      rld[i].rclass = rclass;
1419      rld[i].inmode = inmode;
1420      rld[i].outmode = outmode;
1421      rld[i].reg_rtx = 0;
1422      rld[i].optional = optional;
1423      rld[i].inc = 0;
1424      rld[i].nocombine = 0;
1425      rld[i].in_reg = inloc ? *inloc : 0;
1426      rld[i].out_reg = outloc ? *outloc : 0;
1427      rld[i].opnum = opnum;
1428      rld[i].when_needed = type;
1429      rld[i].secondary_in_reload = secondary_in_reload;
1430      rld[i].secondary_out_reload = secondary_out_reload;
1431      rld[i].secondary_in_icode = secondary_in_icode;
1432      rld[i].secondary_out_icode = secondary_out_icode;
1433      rld[i].secondary_p = 0;
1434
1435      n_reloads++;
1436
1437#ifdef SECONDARY_MEMORY_NEEDED
1438      if (out != 0
1439          && (REG_P (out)
1440	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1441	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1442	  && SECONDARY_MEMORY_NEEDED (rclass,
1443				      REGNO_REG_CLASS (reg_or_subregno (out)),
1444				      outmode))
1445	get_secondary_mem (out, outmode, opnum, type);
1446#endif
1447    }
1448  else
1449    {
1450      /* We are reusing an existing reload,
1451	 but we may have additional information for it.
1452	 For example, we may now have both IN and OUT
1453	 while the old one may have just one of them.  */
1454
1455      /* The modes can be different.  If they are, we want to reload in
1456	 the larger mode, so that the value is valid for both modes.  */
1457      if (inmode != VOIDmode
1458	  && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1459	rld[i].inmode = inmode;
1460      if (outmode != VOIDmode
1461	  && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1462	rld[i].outmode = outmode;
1463      if (in != 0)
1464	{
1465	  rtx in_reg = inloc ? *inloc : 0;
1466	  /* If we merge reloads for two distinct rtl expressions that
1467	     are identical in content, there might be duplicate address
1468	     reloads.  Remove the extra set now, so that if we later find
1469	     that we can inherit this reload, we can get rid of the
1470	     address reloads altogether.
1471
1472	     Do not do this if both reloads are optional since the result
1473	     would be an optional reload which could potentially leave
1474	     unresolved address replacements.
1475
1476	     It is not sufficient to call transfer_replacements since
1477	     choose_reload_regs will remove the replacements for address
1478	     reloads of inherited reloads which results in the same
1479	     problem.  */
1480	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1481	      && ! (rld[i].optional && optional))
1482	    {
1483	      /* We must keep the address reload with the lower operand
1484		 number alive.  */
1485	      if (opnum > rld[i].opnum)
1486		{
1487		  remove_address_replacements (in);
1488		  in = rld[i].in;
1489		  in_reg = rld[i].in_reg;
1490		}
1491	      else
1492		remove_address_replacements (rld[i].in);
1493	    }
1494	  /* When emitting reloads we don't necessarily look at the in-
1495	     and outmode, but also directly at the operands (in and out).
1496	     So we can't simply overwrite them with whatever we have found
1497	     for this (to-be-merged) reload, we have to "merge" that too.
1498	     Reusing another reload already verified that we deal with the
1499	     same operands, just possibly in different modes.  So we
1500	     overwrite the operands only when the new mode is larger.
1501	     See also PR33613.  */
1502	  if (!rld[i].in
1503	      || GET_MODE_SIZE (GET_MODE (in))
1504	           > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1505	    rld[i].in = in;
1506	  if (!rld[i].in_reg
1507	      || (in_reg
1508		  && GET_MODE_SIZE (GET_MODE (in_reg))
1509	             > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1510	    rld[i].in_reg = in_reg;
1511	}
1512      if (out != 0)
1513	{
1514	  if (!rld[i].out
1515	      || (out
1516		  && GET_MODE_SIZE (GET_MODE (out))
1517	             > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1518	    rld[i].out = out;
1519	  if (outloc
1520	      && (!rld[i].out_reg
1521		  || GET_MODE_SIZE (GET_MODE (*outloc))
1522		     > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1523	    rld[i].out_reg = *outloc;
1524	}
1525      if (reg_class_subset_p (rclass, rld[i].rclass))
1526	rld[i].rclass = rclass;
1527      rld[i].optional &= optional;
1528      if (MERGE_TO_OTHER (type, rld[i].when_needed,
1529			  opnum, rld[i].opnum))
1530	rld[i].when_needed = RELOAD_OTHER;
1531      rld[i].opnum = MIN (rld[i].opnum, opnum);
1532    }
1533
1534  /* If the ostensible rtx being reloaded differs from the rtx found
1535     in the location to substitute, this reload is not safe to combine
1536     because we cannot reliably tell whether it appears in the insn.  */
1537
1538  if (in != 0 && in != *inloc)
1539    rld[i].nocombine = 1;
1540
1541#if 0
1542  /* This was replaced by changes in find_reloads_address_1 and the new
1543     function inc_for_reload, which go with a new meaning of reload_inc.  */
1544
1545  /* If this is an IN/OUT reload in an insn that sets the CC,
1546     it must be for an autoincrement.  It doesn't work to store
1547     the incremented value after the insn because that would clobber the CC.
1548     So we must do the increment of the value reloaded from,
1549     increment it, store it back, then decrement again.  */
1550  if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1551    {
1552      out = 0;
1553      rld[i].out = 0;
1554      rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1555      /* If we did not find a nonzero amount-to-increment-by,
1556	 that contradicts the belief that IN is being incremented
1557	 in an address in this insn.  */
1558      gcc_assert (rld[i].inc != 0);
1559    }
1560#endif
1561
1562  /* If we will replace IN and OUT with the reload-reg,
1563     record where they are located so that substitution need
1564     not do a tree walk.  */
1565
1566  if (replace_reloads)
1567    {
1568      if (inloc != 0)
1569	{
1570	  struct replacement *r = &replacements[n_replacements++];
1571	  r->what = i;
1572	  r->where = inloc;
1573	  r->mode = inmode;
1574	}
1575      if (outloc != 0 && outloc != inloc)
1576	{
1577	  struct replacement *r = &replacements[n_replacements++];
1578	  r->what = i;
1579	  r->where = outloc;
1580	  r->mode = outmode;
1581	}
1582    }
1583
1584  /* If this reload is just being introduced and it has both
1585     an incoming quantity and an outgoing quantity that are
1586     supposed to be made to match, see if either one of the two
1587     can serve as the place to reload into.
1588
1589     If one of them is acceptable, set rld[i].reg_rtx
1590     to that one.  */
1591
1592  if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1593    {
1594      rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1595					  inmode, outmode,
1596					  rld[i].rclass, i,
1597					  earlyclobber_operand_p (out));
1598
1599      /* If the outgoing register already contains the same value
1600	 as the incoming one, we can dispense with loading it.
1601	 The easiest way to tell the caller that is to give a phony
1602	 value for the incoming operand (same as outgoing one).  */
1603      if (rld[i].reg_rtx == out
1604	  && (REG_P (in) || CONSTANT_P (in))
1605	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1606				  static_reload_reg_p, i, inmode))
1607	rld[i].in = out;
1608    }
1609
1610  /* If this is an input reload and the operand contains a register that
1611     dies in this insn and is used nowhere else, see if it is the right class
1612     to be used for this reload.  Use it if so.  (This occurs most commonly
1613     in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
1614     this if it is also an output reload that mentions the register unless
1615     the output is a SUBREG that clobbers an entire register.
1616
1617     Note that the operand might be one of the spill regs, if it is a
1618     pseudo reg and we are in a block where spilling has not taken place.
1619     But if there is no spilling in this block, that is OK.
1620     An explicitly used hard reg cannot be a spill reg.  */
1621
1622  if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1623    {
1624      rtx note;
1625      int regno;
1626      machine_mode rel_mode = inmode;
1627
1628      if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1629	rel_mode = outmode;
1630
1631      for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1632	if (REG_NOTE_KIND (note) == REG_DEAD
1633	    && REG_P (XEXP (note, 0))
1634	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1635	    && reg_mentioned_p (XEXP (note, 0), in)
1636	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
1637	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1638		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1639				    ORIGINAL_REGNO (XEXP (note, 0)))
1640		    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1641	    && ! refers_to_regno_for_reload_p (regno,
1642					       end_hard_regno (rel_mode,
1643							       regno),
1644					       PATTERN (this_insn), inloc)
1645	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1646	    /* If this is also an output reload, IN cannot be used as
1647	       the reload register if it is set in this insn unless IN
1648	       is also OUT.  */
1649	    && (out == 0 || in == out
1650		|| ! hard_reg_set_here_p (regno,
1651					  end_hard_regno (rel_mode, regno),
1652					  PATTERN (this_insn)))
1653	    /* ??? Why is this code so different from the previous?
1654	       Is there any simple coherent way to describe the two together?
1655	       What's going on here.  */
1656	    && (in != out
1657		|| (GET_CODE (in) == SUBREG
1658		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1659			 / UNITS_PER_WORD)
1660			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1661			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1662	    /* Make sure the operand fits in the reg that dies.  */
1663	    && (GET_MODE_SIZE (rel_mode)
1664		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1665	    && HARD_REGNO_MODE_OK (regno, inmode)
1666	    && HARD_REGNO_MODE_OK (regno, outmode))
1667	  {
1668	    unsigned int offs;
1669	    unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1670				      hard_regno_nregs[regno][outmode]);
1671
1672	    for (offs = 0; offs < nregs; offs++)
1673	      if (fixed_regs[regno + offs]
1674		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1675					  regno + offs))
1676		break;
1677
1678	    if (offs == nregs
1679		&& (! (refers_to_regno_for_reload_p
1680		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1681		    || can_reload_into (in, regno, inmode)))
1682	      {
1683		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1684		break;
1685	      }
1686	  }
1687    }
1688
1689  if (out)
1690    output_reloadnum = i;
1691
1692  return i;
1693}
1694
1695/* Record an additional place we must replace a value
1696   for which we have already recorded a reload.
1697   RELOADNUM is the value returned by push_reload
1698   when the reload was recorded.
1699   This is used in insn patterns that use match_dup.  */
1700
1701static void
1702push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1703{
1704  if (replace_reloads)
1705    {
1706      struct replacement *r = &replacements[n_replacements++];
1707      r->what = reloadnum;
1708      r->where = loc;
1709      r->mode = mode;
1710    }
1711}
1712
1713/* Duplicate any replacement we have recorded to apply at
1714   location ORIG_LOC to also be performed at DUP_LOC.
1715   This is used in insn patterns that use match_dup.  */
1716
1717static void
1718dup_replacements (rtx *dup_loc, rtx *orig_loc)
1719{
1720  int i, n = n_replacements;
1721
1722  for (i = 0; i < n; i++)
1723    {
1724      struct replacement *r = &replacements[i];
1725      if (r->where == orig_loc)
1726	push_replacement (dup_loc, r->what, r->mode);
1727    }
1728}
1729
1730/* Transfer all replacements that used to be in reload FROM to be in
1731   reload TO.  */
1732
1733void
1734transfer_replacements (int to, int from)
1735{
1736  int i;
1737
1738  for (i = 0; i < n_replacements; i++)
1739    if (replacements[i].what == from)
1740      replacements[i].what = to;
1741}
1742
1743/* IN_RTX is the value loaded by a reload that we now decided to inherit,
1744   or a subpart of it.  If we have any replacements registered for IN_RTX,
1745   cancel the reloads that were supposed to load them.
1746   Return nonzero if we canceled any reloads.  */
1747int
1748remove_address_replacements (rtx in_rtx)
1749{
1750  int i, j;
1751  char reload_flags[MAX_RELOADS];
1752  int something_changed = 0;
1753
1754  memset (reload_flags, 0, sizeof reload_flags);
1755  for (i = 0, j = 0; i < n_replacements; i++)
1756    {
1757      if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1758	reload_flags[replacements[i].what] |= 1;
1759      else
1760	{
1761	  replacements[j++] = replacements[i];
1762	  reload_flags[replacements[i].what] |= 2;
1763	}
1764    }
1765  /* Note that the following store must be done before the recursive calls.  */
1766  n_replacements = j;
1767
1768  for (i = n_reloads - 1; i >= 0; i--)
1769    {
1770      if (reload_flags[i] == 1)
1771	{
1772	  deallocate_reload_reg (i);
1773	  remove_address_replacements (rld[i].in);
1774	  rld[i].in = 0;
1775	  something_changed = 1;
1776	}
1777    }
1778  return something_changed;
1779}
1780
1781/* If there is only one output reload, and it is not for an earlyclobber
1782   operand, try to combine it with a (logically unrelated) input reload
1783   to reduce the number of reload registers needed.
1784
1785   This is safe if the input reload does not appear in
1786   the value being output-reloaded, because this implies
1787   it is not needed any more once the original insn completes.
1788
1789   If that doesn't work, see we can use any of the registers that
1790   die in this insn as a reload register.  We can if it is of the right
1791   class and does not appear in the value being output-reloaded.  */
1792
1793static void
1794combine_reloads (void)
1795{
1796  int i, regno;
1797  int output_reload = -1;
1798  int secondary_out = -1;
1799  rtx note;
1800
1801  /* Find the output reload; return unless there is exactly one
1802     and that one is mandatory.  */
1803
1804  for (i = 0; i < n_reloads; i++)
1805    if (rld[i].out != 0)
1806      {
1807	if (output_reload >= 0)
1808	  return;
1809	output_reload = i;
1810      }
1811
1812  if (output_reload < 0 || rld[output_reload].optional)
1813    return;
1814
1815  /* An input-output reload isn't combinable.  */
1816
1817  if (rld[output_reload].in != 0)
1818    return;
1819
1820  /* If this reload is for an earlyclobber operand, we can't do anything.  */
1821  if (earlyclobber_operand_p (rld[output_reload].out))
1822    return;
1823
1824  /* If there is a reload for part of the address of this operand, we would
1825     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1826     its life to the point where doing this combine would not lower the
1827     number of spill registers needed.  */
1828  for (i = 0; i < n_reloads; i++)
1829    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1830	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1831	&& rld[i].opnum == rld[output_reload].opnum)
1832      return;
1833
1834  /* Check each input reload; can we combine it?  */
1835
1836  for (i = 0; i < n_reloads; i++)
1837    if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1838	/* Life span of this reload must not extend past main insn.  */
1839	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1840	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1841	&& rld[i].when_needed != RELOAD_OTHER
1842	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1843	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1844				       [(int) rld[output_reload].outmode])
1845	&& rld[i].inc == 0
1846	&& rld[i].reg_rtx == 0
1847#ifdef SECONDARY_MEMORY_NEEDED
1848	/* Don't combine two reloads with different secondary
1849	   memory locations.  */
1850	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1851	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1852	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1853			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1854#endif
1855	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
1856	    ? (rld[i].rclass == rld[output_reload].rclass)
1857	    : (reg_class_subset_p (rld[i].rclass,
1858				   rld[output_reload].rclass)
1859	       || reg_class_subset_p (rld[output_reload].rclass,
1860				      rld[i].rclass)))
1861	&& (MATCHES (rld[i].in, rld[output_reload].out)
1862	    /* Args reversed because the first arg seems to be
1863	       the one that we imagine being modified
1864	       while the second is the one that might be affected.  */
1865	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1866						      rld[i].in)
1867		/* However, if the input is a register that appears inside
1868		   the output, then we also can't share.
1869		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1870		   If the same reload reg is used for both reg 69 and the
1871		   result to be stored in memory, then that result
1872		   will clobber the address of the memory ref.  */
1873		&& ! (REG_P (rld[i].in)
1874		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
1875							     rld[output_reload].out))))
1876	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1877					 rld[i].when_needed != RELOAD_FOR_INPUT)
1878	&& (reg_class_size[(int) rld[i].rclass]
1879	    || targetm.small_register_classes_for_mode_p (VOIDmode))
1880	/* We will allow making things slightly worse by combining an
1881	   input and an output, but no worse than that.  */
1882	&& (rld[i].when_needed == RELOAD_FOR_INPUT
1883	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1884      {
1885	int j;
1886
1887	/* We have found a reload to combine with!  */
1888	rld[i].out = rld[output_reload].out;
1889	rld[i].out_reg = rld[output_reload].out_reg;
1890	rld[i].outmode = rld[output_reload].outmode;
1891	/* Mark the old output reload as inoperative.  */
1892	rld[output_reload].out = 0;
1893	/* The combined reload is needed for the entire insn.  */
1894	rld[i].when_needed = RELOAD_OTHER;
1895	/* If the output reload had a secondary reload, copy it.  */
1896	if (rld[output_reload].secondary_out_reload != -1)
1897	  {
1898	    rld[i].secondary_out_reload
1899	      = rld[output_reload].secondary_out_reload;
1900	    rld[i].secondary_out_icode
1901	      = rld[output_reload].secondary_out_icode;
1902	  }
1903
1904#ifdef SECONDARY_MEMORY_NEEDED
1905	/* Copy any secondary MEM.  */
1906	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1907	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1908	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1909#endif
1910	/* If required, minimize the register class.  */
1911	if (reg_class_subset_p (rld[output_reload].rclass,
1912				rld[i].rclass))
1913	  rld[i].rclass = rld[output_reload].rclass;
1914
1915	/* Transfer all replacements from the old reload to the combined.  */
1916	for (j = 0; j < n_replacements; j++)
1917	  if (replacements[j].what == output_reload)
1918	    replacements[j].what = i;
1919
1920	return;
1921      }
1922
1923  /* If this insn has only one operand that is modified or written (assumed
1924     to be the first),  it must be the one corresponding to this reload.  It
1925     is safe to use anything that dies in this insn for that output provided
1926     that it does not occur in the output (we already know it isn't an
1927     earlyclobber.  If this is an asm insn, give up.  */
1928
1929  if (INSN_CODE (this_insn) == -1)
1930    return;
1931
1932  for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1933    if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1934	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1935      return;
1936
1937  /* See if some hard register that dies in this insn and is not used in
1938     the output is the right class.  Only works if the register we pick
1939     up can fully hold our output reload.  */
1940  for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1941    if (REG_NOTE_KIND (note) == REG_DEAD
1942	&& REG_P (XEXP (note, 0))
1943	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1944						rld[output_reload].out)
1945	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1946	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1947	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1948			      regno)
1949	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
1950	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1951	/* Ensure that a secondary or tertiary reload for this output
1952	   won't want this register.  */
1953	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1954	    || (!(TEST_HARD_REG_BIT
1955		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1956		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1957		    || !(TEST_HARD_REG_BIT
1958			 (reg_class_contents[(int) rld[secondary_out].rclass],
1959			  regno)))))
1960	&& !fixed_regs[regno]
1961	/* Check that a former pseudo is valid; see find_dummy_reload.  */
1962	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1963	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1964			       ORIGINAL_REGNO (XEXP (note, 0)))
1965		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1966      {
1967	rld[output_reload].reg_rtx
1968	  = gen_rtx_REG (rld[output_reload].outmode, regno);
1969	return;
1970      }
1971}
1972
1973/* Try to find a reload register for an in-out reload (expressions IN and OUT).
1974   See if one of IN and OUT is a register that may be used;
1975   this is desirable since a spill-register won't be needed.
1976   If so, return the register rtx that proves acceptable.
1977
1978   INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1979   RCLASS is the register class required for the reload.
1980
1981   If FOR_REAL is >= 0, it is the number of the reload,
1982   and in some cases when it can be discovered that OUT doesn't need
1983   to be computed, clear out rld[FOR_REAL].out.
1984
1985   If FOR_REAL is -1, this should not be done, because this call
1986   is just to see if a register can be found, not to find and install it.
1987
1988   EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
1989   puts an additional constraint on being able to use IN for OUT since
1990   IN must not appear elsewhere in the insn (it is assumed that IN itself
1991   is safe from the earlyclobber).  */
1992
1993static rtx
1994find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1995		   machine_mode inmode, machine_mode outmode,
1996		   reg_class_t rclass, int for_real, int earlyclobber)
1997{
1998  rtx in = real_in;
1999  rtx out = real_out;
2000  int in_offset = 0;
2001  int out_offset = 0;
2002  rtx value = 0;
2003
2004  /* If operands exceed a word, we can't use either of them
2005     unless they have the same size.  */
2006  if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
2007      && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
2008	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
2009    return 0;
2010
2011  /* Note that {in,out}_offset are needed only when 'in' or 'out'
2012     respectively refers to a hard register.  */
2013
2014  /* Find the inside of any subregs.  */
2015  while (GET_CODE (out) == SUBREG)
2016    {
2017      if (REG_P (SUBREG_REG (out))
2018	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
2019	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
2020					   GET_MODE (SUBREG_REG (out)),
2021					   SUBREG_BYTE (out),
2022					   GET_MODE (out));
2023      out = SUBREG_REG (out);
2024    }
2025  while (GET_CODE (in) == SUBREG)
2026    {
2027      if (REG_P (SUBREG_REG (in))
2028	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2029	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2030					  GET_MODE (SUBREG_REG (in)),
2031					  SUBREG_BYTE (in),
2032					  GET_MODE (in));
2033      in = SUBREG_REG (in);
2034    }
2035
2036  /* Narrow down the reg class, the same way push_reload will;
2037     otherwise we might find a dummy now, but push_reload won't.  */
2038  {
2039    reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2040    if (preferred_class != NO_REGS)
2041      rclass = (enum reg_class) preferred_class;
2042  }
2043
2044  /* See if OUT will do.  */
2045  if (REG_P (out)
2046      && REGNO (out) < FIRST_PSEUDO_REGISTER)
2047    {
2048      unsigned int regno = REGNO (out) + out_offset;
2049      unsigned int nwords = hard_regno_nregs[regno][outmode];
2050      rtx saved_rtx;
2051
2052      /* When we consider whether the insn uses OUT,
2053	 ignore references within IN.  They don't prevent us
2054	 from copying IN into OUT, because those refs would
2055	 move into the insn that reloads IN.
2056
2057	 However, we only ignore IN in its role as this reload.
2058	 If the insn uses IN elsewhere and it contains OUT,
2059	 that counts.  We can't be sure it's the "same" operand
2060	 so it might not go through this reload.
2061
2062         We also need to avoid using OUT if it, or part of it, is a
2063         fixed register.  Modifying such registers, even transiently,
2064         may have undefined effects on the machine, such as modifying
2065         the stack pointer.  */
2066      saved_rtx = *inloc;
2067      *inloc = const0_rtx;
2068
2069      if (regno < FIRST_PSEUDO_REGISTER
2070	  && HARD_REGNO_MODE_OK (regno, outmode)
2071	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2072					     PATTERN (this_insn), outloc))
2073	{
2074	  unsigned int i;
2075
2076	  for (i = 0; i < nwords; i++)
2077	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2078				     regno + i)
2079		|| fixed_regs[regno + i])
2080	      break;
2081
2082	  if (i == nwords)
2083	    {
2084	      if (REG_P (real_out))
2085		value = real_out;
2086	      else
2087		value = gen_rtx_REG (outmode, regno);
2088	    }
2089	}
2090
2091      *inloc = saved_rtx;
2092    }
2093
2094  /* Consider using IN if OUT was not acceptable
2095     or if OUT dies in this insn (like the quotient in a divmod insn).
2096     We can't use IN unless it is dies in this insn,
2097     which means we must know accurately which hard regs are live.
2098     Also, the result can't go in IN if IN is used within OUT,
2099     or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2100  if (hard_regs_live_known
2101      && REG_P (in)
2102      && REGNO (in) < FIRST_PSEUDO_REGISTER
2103      && (value == 0
2104	  || find_reg_note (this_insn, REG_UNUSED, real_out))
2105      && find_reg_note (this_insn, REG_DEAD, real_in)
2106      && !fixed_regs[REGNO (in)]
2107      && HARD_REGNO_MODE_OK (REGNO (in),
2108			     /* The only case where out and real_out might
2109				have different modes is where real_out
2110				is a subreg, and in that case, out
2111				has a real mode.  */
2112			     (GET_MODE (out) != VOIDmode
2113			      ? GET_MODE (out) : outmode))
2114      && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2115	  /* However only do this if we can be sure that this input
2116	     operand doesn't correspond with an uninitialized pseudo.
2117	     global can assign some hardreg to it that is the same as
2118	     the one assigned to a different, also live pseudo (as it
2119	     can ignore the conflict).  We must never introduce writes
2120	     to such hardregs, as they would clobber the other live
2121	     pseudo.  See PR 20973.  */
2122	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2123			     ORIGINAL_REGNO (in))
2124	      /* Similarly, only do this if we can be sure that the death
2125		 note is still valid.  global can assign some hardreg to
2126		 the pseudo referenced in the note and simultaneously a
2127		 subword of this hardreg to a different, also live pseudo,
2128		 because only another subword of the hardreg is actually
2129		 used in the insn.  This cannot happen if the pseudo has
2130		 been assigned exactly one hardreg.  See PR 33732.  */
2131	      && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2132    {
2133      unsigned int regno = REGNO (in) + in_offset;
2134      unsigned int nwords = hard_regno_nregs[regno][inmode];
2135
2136      if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2137	  && ! hard_reg_set_here_p (regno, regno + nwords,
2138				    PATTERN (this_insn))
2139	  && (! earlyclobber
2140	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2141						 PATTERN (this_insn), inloc)))
2142	{
2143	  unsigned int i;
2144
2145	  for (i = 0; i < nwords; i++)
2146	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2147				     regno + i))
2148	      break;
2149
2150	  if (i == nwords)
2151	    {
2152	      /* If we were going to use OUT as the reload reg
2153		 and changed our mind, it means OUT is a dummy that
2154		 dies here.  So don't bother copying value to it.  */
2155	      if (for_real >= 0 && value == real_out)
2156		rld[for_real].out = 0;
2157	      if (REG_P (real_in))
2158		value = real_in;
2159	      else
2160		value = gen_rtx_REG (inmode, regno);
2161	    }
2162	}
2163    }
2164
2165  return value;
2166}
2167
2168/* This page contains subroutines used mainly for determining
2169   whether the IN or an OUT of a reload can serve as the
2170   reload register.  */
2171
2172/* Return 1 if X is an operand of an insn that is being earlyclobbered.  */
2173
2174int
2175earlyclobber_operand_p (rtx x)
2176{
2177  int i;
2178
2179  for (i = 0; i < n_earlyclobbers; i++)
2180    if (reload_earlyclobbers[i] == x)
2181      return 1;
2182
2183  return 0;
2184}
2185
2186/* Return 1 if expression X alters a hard reg in the range
2187   from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2188   either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2189   X should be the body of an instruction.  */
2190
2191static int
2192hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2193{
2194  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2195    {
2196      rtx op0 = SET_DEST (x);
2197
2198      while (GET_CODE (op0) == SUBREG)
2199	op0 = SUBREG_REG (op0);
2200      if (REG_P (op0))
2201	{
2202	  unsigned int r = REGNO (op0);
2203
2204	  /* See if this reg overlaps range under consideration.  */
2205	  if (r < end_regno
2206	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2207	    return 1;
2208	}
2209    }
2210  else if (GET_CODE (x) == PARALLEL)
2211    {
2212      int i = XVECLEN (x, 0) - 1;
2213
2214      for (; i >= 0; i--)
2215	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2216	  return 1;
2217    }
2218
2219  return 0;
2220}
2221
2222/* Return 1 if ADDR is a valid memory address for mode MODE
2223   in address space AS, and check that each pseudo reg has the
2224   proper kind of hard reg.  */
2225
2226int
2227strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2228				    rtx addr, addr_space_t as)
2229{
2230#ifdef GO_IF_LEGITIMATE_ADDRESS
2231  gcc_assert (ADDR_SPACE_GENERIC_P (as));
2232  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2233  return 0;
2234
2235 win:
2236  return 1;
2237#else
2238  return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2239#endif
2240}
2241
2242/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2243   if they are the same hard reg, and has special hacks for
2244   autoincrement and autodecrement.
2245   This is specifically intended for find_reloads to use
2246   in determining whether two operands match.
2247   X is the operand whose number is the lower of the two.
2248
2249   The value is 2 if Y contains a pre-increment that matches
2250   a non-incrementing address in X.  */
2251
2252/* ??? To be completely correct, we should arrange to pass
2253   for X the output operand and for Y the input operand.
2254   For now, we assume that the output operand has the lower number
2255   because that is natural in (SET output (... input ...)).  */
2256
2257int
2258operands_match_p (rtx x, rtx y)
2259{
2260  int i;
2261  RTX_CODE code = GET_CODE (x);
2262  const char *fmt;
2263  int success_2;
2264
2265  if (x == y)
2266    return 1;
2267  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2268      && (REG_P (y) || (GET_CODE (y) == SUBREG
2269				  && REG_P (SUBREG_REG (y)))))
2270    {
2271      int j;
2272
2273      if (code == SUBREG)
2274	{
2275	  i = REGNO (SUBREG_REG (x));
2276	  if (i >= FIRST_PSEUDO_REGISTER)
2277	    goto slow;
2278	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2279				    GET_MODE (SUBREG_REG (x)),
2280				    SUBREG_BYTE (x),
2281				    GET_MODE (x));
2282	}
2283      else
2284	i = REGNO (x);
2285
2286      if (GET_CODE (y) == SUBREG)
2287	{
2288	  j = REGNO (SUBREG_REG (y));
2289	  if (j >= FIRST_PSEUDO_REGISTER)
2290	    goto slow;
2291	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2292				    GET_MODE (SUBREG_REG (y)),
2293				    SUBREG_BYTE (y),
2294				    GET_MODE (y));
2295	}
2296      else
2297	j = REGNO (y);
2298
2299      /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2300	 multiple hard register group of scalar integer registers, so that
2301	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2302	 register.  */
2303      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2304	  && SCALAR_INT_MODE_P (GET_MODE (x))
2305	  && i < FIRST_PSEUDO_REGISTER)
2306	i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2307      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2308	  && SCALAR_INT_MODE_P (GET_MODE (y))
2309	  && j < FIRST_PSEUDO_REGISTER)
2310	j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2311
2312      return i == j;
2313    }
2314  /* If two operands must match, because they are really a single
2315     operand of an assembler insn, then two postincrements are invalid
2316     because the assembler insn would increment only once.
2317     On the other hand, a postincrement matches ordinary indexing
2318     if the postincrement is the output operand.  */
2319  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2320    return operands_match_p (XEXP (x, 0), y);
2321  /* Two preincrements are invalid
2322     because the assembler insn would increment only once.
2323     On the other hand, a preincrement matches ordinary indexing
2324     if the preincrement is the input operand.
2325     In this case, return 2, since some callers need to do special
2326     things when this happens.  */
2327  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2328      || GET_CODE (y) == PRE_MODIFY)
2329    return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2330
2331 slow:
2332
2333  /* Now we have disposed of all the cases in which different rtx codes
2334     can match.  */
2335  if (code != GET_CODE (y))
2336    return 0;
2337
2338  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2339  if (GET_MODE (x) != GET_MODE (y))
2340    return 0;
2341
2342  /* MEMs referring to different address space are not equivalent.  */
2343  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2344    return 0;
2345
2346  switch (code)
2347    {
2348    CASE_CONST_UNIQUE:
2349      return 0;
2350
2351    case LABEL_REF:
2352      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2353    case SYMBOL_REF:
2354      return XSTR (x, 0) == XSTR (y, 0);
2355
2356    default:
2357      break;
2358    }
2359
2360  /* Compare the elements.  If any pair of corresponding elements
2361     fail to match, return 0 for the whole things.  */
2362
2363  success_2 = 0;
2364  fmt = GET_RTX_FORMAT (code);
2365  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2366    {
2367      int val, j;
2368      switch (fmt[i])
2369	{
2370	case 'w':
2371	  if (XWINT (x, i) != XWINT (y, i))
2372	    return 0;
2373	  break;
2374
2375	case 'i':
2376	  if (XINT (x, i) != XINT (y, i))
2377	    return 0;
2378	  break;
2379
2380	case 'e':
2381	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
2382	  if (val == 0)
2383	    return 0;
2384	  /* If any subexpression returns 2,
2385	     we should return 2 if we are successful.  */
2386	  if (val == 2)
2387	    success_2 = 1;
2388	  break;
2389
2390	case '0':
2391	  break;
2392
2393	case 'E':
2394	  if (XVECLEN (x, i) != XVECLEN (y, i))
2395	    return 0;
2396	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2397	    {
2398	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2399	      if (val == 0)
2400		return 0;
2401	      if (val == 2)
2402		success_2 = 1;
2403	    }
2404	  break;
2405
2406	  /* It is believed that rtx's at this level will never
2407	     contain anything but integers and other rtx's,
2408	     except for within LABEL_REFs and SYMBOL_REFs.  */
2409	default:
2410	  gcc_unreachable ();
2411	}
2412    }
2413  return 1 + success_2;
2414}
2415
2416/* Describe the range of registers or memory referenced by X.
2417   If X is a register, set REG_FLAG and put the first register
2418   number into START and the last plus one into END.
2419   If X is a memory reference, put a base address into BASE
2420   and a range of integer offsets into START and END.
2421   If X is pushing on the stack, we can assume it causes no trouble,
2422   so we set the SAFE field.  */
2423
2424static struct decomposition
2425decompose (rtx x)
2426{
2427  struct decomposition val;
2428  int all_const = 0;
2429
2430  memset (&val, 0, sizeof (val));
2431
2432  switch (GET_CODE (x))
2433    {
2434    case MEM:
2435      {
2436	rtx base = NULL_RTX, offset = 0;
2437	rtx addr = XEXP (x, 0);
2438
2439	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2440	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2441	  {
2442	    val.base = XEXP (addr, 0);
2443	    val.start = -GET_MODE_SIZE (GET_MODE (x));
2444	    val.end = GET_MODE_SIZE (GET_MODE (x));
2445	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2446	    return val;
2447	  }
2448
2449	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2450	  {
2451	    if (GET_CODE (XEXP (addr, 1)) == PLUS
2452		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2453		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2454	      {
2455		val.base  = XEXP (addr, 0);
2456		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2457		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
2458		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
2459		return val;
2460	      }
2461	  }
2462
2463	if (GET_CODE (addr) == CONST)
2464	  {
2465	    addr = XEXP (addr, 0);
2466	    all_const = 1;
2467	  }
2468	if (GET_CODE (addr) == PLUS)
2469	  {
2470	    if (CONSTANT_P (XEXP (addr, 0)))
2471	      {
2472		base = XEXP (addr, 1);
2473		offset = XEXP (addr, 0);
2474	      }
2475	    else if (CONSTANT_P (XEXP (addr, 1)))
2476	      {
2477		base = XEXP (addr, 0);
2478		offset = XEXP (addr, 1);
2479	      }
2480	  }
2481
2482	if (offset == 0)
2483	  {
2484	    base = addr;
2485	    offset = const0_rtx;
2486	  }
2487	if (GET_CODE (offset) == CONST)
2488	  offset = XEXP (offset, 0);
2489	if (GET_CODE (offset) == PLUS)
2490	  {
2491	    if (CONST_INT_P (XEXP (offset, 0)))
2492	      {
2493		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2494		offset = XEXP (offset, 0);
2495	      }
2496	    else if (CONST_INT_P (XEXP (offset, 1)))
2497	      {
2498		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2499		offset = XEXP (offset, 1);
2500	      }
2501	    else
2502	      {
2503		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2504		offset = const0_rtx;
2505	      }
2506	  }
2507	else if (!CONST_INT_P (offset))
2508	  {
2509	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2510	    offset = const0_rtx;
2511	  }
2512
2513	if (all_const && GET_CODE (base) == PLUS)
2514	  base = gen_rtx_CONST (GET_MODE (base), base);
2515
2516	gcc_assert (CONST_INT_P (offset));
2517
2518	val.start = INTVAL (offset);
2519	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2520	val.base = base;
2521      }
2522      break;
2523
2524    case REG:
2525      val.reg_flag = 1;
2526      val.start = true_regnum (x);
2527      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2528	{
2529	  /* A pseudo with no hard reg.  */
2530	  val.start = REGNO (x);
2531	  val.end = val.start + 1;
2532	}
2533      else
2534	/* A hard reg.  */
2535	val.end = end_hard_regno (GET_MODE (x), val.start);
2536      break;
2537
2538    case SUBREG:
2539      if (!REG_P (SUBREG_REG (x)))
2540	/* This could be more precise, but it's good enough.  */
2541	return decompose (SUBREG_REG (x));
2542      val.reg_flag = 1;
2543      val.start = true_regnum (x);
2544      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2545	return decompose (SUBREG_REG (x));
2546      else
2547	/* A hard reg.  */
2548	val.end = val.start + subreg_nregs (x);
2549      break;
2550
2551    case SCRATCH:
2552      /* This hasn't been assigned yet, so it can't conflict yet.  */
2553      val.safe = 1;
2554      break;
2555
2556    default:
2557      gcc_assert (CONSTANT_P (x));
2558      val.safe = 1;
2559      break;
2560    }
2561  return val;
2562}
2563
2564/* Return 1 if altering Y will not modify the value of X.
2565   Y is also described by YDATA, which should be decompose (Y).  */
2566
2567static int
2568immune_p (rtx x, rtx y, struct decomposition ydata)
2569{
2570  struct decomposition xdata;
2571
2572  if (ydata.reg_flag)
2573    return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2574  if (ydata.safe)
2575    return 1;
2576
2577  gcc_assert (MEM_P (y));
2578  /* If Y is memory and X is not, Y can't affect X.  */
2579  if (!MEM_P (x))
2580    return 1;
2581
2582  xdata = decompose (x);
2583
2584  if (! rtx_equal_p (xdata.base, ydata.base))
2585    {
2586      /* If bases are distinct symbolic constants, there is no overlap.  */
2587      if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2588	return 1;
2589      /* Constants and stack slots never overlap.  */
2590      if (CONSTANT_P (xdata.base)
2591	  && (ydata.base == frame_pointer_rtx
2592	      || ydata.base == hard_frame_pointer_rtx
2593	      || ydata.base == stack_pointer_rtx))
2594	return 1;
2595      if (CONSTANT_P (ydata.base)
2596	  && (xdata.base == frame_pointer_rtx
2597	      || xdata.base == hard_frame_pointer_rtx
2598	      || xdata.base == stack_pointer_rtx))
2599	return 1;
2600      /* If either base is variable, we don't know anything.  */
2601      return 0;
2602    }
2603
2604  return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2605}
2606
2607/* Similar, but calls decompose.  */
2608
2609int
2610safe_from_earlyclobber (rtx op, rtx clobber)
2611{
2612  struct decomposition early_data;
2613
2614  early_data = decompose (clobber);
2615  return immune_p (op, clobber, early_data);
2616}
2617
2618/* Main entry point of this file: search the body of INSN
2619   for values that need reloading and record them with push_reload.
2620   REPLACE nonzero means record also where the values occur
2621   so that subst_reloads can be used.
2622
2623   IND_LEVELS says how many levels of indirection are supported by this
2624   machine; a value of zero means that a memory reference is not a valid
2625   memory address.
2626
2627   LIVE_KNOWN says we have valid information about which hard
2628   regs are live at each point in the program; this is true when
2629   we are called from global_alloc but false when stupid register
2630   allocation has been done.
2631
2632   RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2633   which is nonnegative if the reg has been commandeered for reloading into.
2634   It is copied into STATIC_RELOAD_REG_P and referenced from there
2635   by various subroutines.
2636
2637   Return TRUE if some operands need to be changed, because of swapping
2638   commutative operands, reg_equiv_address substitution, or whatever.  */
2639
2640int
2641find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2642	      short *reload_reg_p)
2643{
2644  int insn_code_number;
2645  int i, j;
2646  int noperands;
2647  /* These start out as the constraints for the insn
2648     and they are chewed up as we consider alternatives.  */
2649  const char *constraints[MAX_RECOG_OPERANDS];
2650  /* These are the preferred classes for an operand, or NO_REGS if it isn't
2651     a register.  */
2652  enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2653  char pref_or_nothing[MAX_RECOG_OPERANDS];
2654  /* Nonzero for a MEM operand whose entire address needs a reload.
2655     May be -1 to indicate the entire address may or may not need a reload.  */
2656  int address_reloaded[MAX_RECOG_OPERANDS];
2657  /* Nonzero for an address operand that needs to be completely reloaded.
2658     May be -1 to indicate the entire operand may or may not need a reload.  */
2659  int address_operand_reloaded[MAX_RECOG_OPERANDS];
2660  /* Value of enum reload_type to use for operand.  */
2661  enum reload_type operand_type[MAX_RECOG_OPERANDS];
2662  /* Value of enum reload_type to use within address of operand.  */
2663  enum reload_type address_type[MAX_RECOG_OPERANDS];
2664  /* Save the usage of each operand.  */
2665  enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2666  int no_input_reloads = 0, no_output_reloads = 0;
2667  int n_alternatives;
2668  reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2669  char this_alternative_match_win[MAX_RECOG_OPERANDS];
2670  char this_alternative_win[MAX_RECOG_OPERANDS];
2671  char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2672  char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2673  int this_alternative_matches[MAX_RECOG_OPERANDS];
2674  reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2675  int this_alternative_number;
2676  int goal_alternative_number = 0;
2677  int operand_reloadnum[MAX_RECOG_OPERANDS];
2678  int goal_alternative_matches[MAX_RECOG_OPERANDS];
2679  int goal_alternative_matched[MAX_RECOG_OPERANDS];
2680  char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2681  char goal_alternative_win[MAX_RECOG_OPERANDS];
2682  char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2683  char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2684  int goal_alternative_swapped;
2685  int best;
2686  int commutative;
2687  char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2688  rtx substed_operand[MAX_RECOG_OPERANDS];
2689  rtx body = PATTERN (insn);
2690  rtx set = single_set (insn);
2691  int goal_earlyclobber = 0, this_earlyclobber;
2692  machine_mode operand_mode[MAX_RECOG_OPERANDS];
2693  int retval = 0;
2694
2695  this_insn = insn;
2696  n_reloads = 0;
2697  n_replacements = 0;
2698  n_earlyclobbers = 0;
2699  replace_reloads = replace;
2700  hard_regs_live_known = live_known;
2701  static_reload_reg_p = reload_reg_p;
2702
2703  /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2704     neither are insns that SET cc0.  Insns that use CC0 are not allowed
2705     to have any input reloads.  */
2706  if (JUMP_P (insn) || CALL_P (insn))
2707    no_output_reloads = 1;
2708
2709#ifdef HAVE_cc0
2710  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2711    no_input_reloads = 1;
2712  if (reg_set_p (cc0_rtx, PATTERN (insn)))
2713    no_output_reloads = 1;
2714#endif
2715
2716#ifdef SECONDARY_MEMORY_NEEDED
2717  /* The eliminated forms of any secondary memory locations are per-insn, so
2718     clear them out here.  */
2719
2720  if (secondary_memlocs_elim_used)
2721    {
2722      memset (secondary_memlocs_elim, 0,
2723	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2724      secondary_memlocs_elim_used = 0;
2725    }
2726#endif
2727
2728  /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2729     is cheap to move between them.  If it is not, there may not be an insn
2730     to do the copy, so we may need a reload.  */
2731  if (GET_CODE (body) == SET
2732      && REG_P (SET_DEST (body))
2733      && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2734      && REG_P (SET_SRC (body))
2735      && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2736      && register_move_cost (GET_MODE (SET_SRC (body)),
2737			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2738			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2739    return 0;
2740
2741  extract_insn (insn);
2742
2743  noperands = reload_n_operands = recog_data.n_operands;
2744  n_alternatives = recog_data.n_alternatives;
2745
2746  /* Just return "no reloads" if insn has no operands with constraints.  */
2747  if (noperands == 0 || n_alternatives == 0)
2748    return 0;
2749
2750  insn_code_number = INSN_CODE (insn);
2751  this_insn_is_asm = insn_code_number < 0;
2752
2753  memcpy (operand_mode, recog_data.operand_mode,
2754	  noperands * sizeof (machine_mode));
2755  memcpy (constraints, recog_data.constraints,
2756	  noperands * sizeof (const char *));
2757
2758  commutative = -1;
2759
2760  /* If we will need to know, later, whether some pair of operands
2761     are the same, we must compare them now and save the result.
2762     Reloading the base and index registers will clobber them
2763     and afterward they will fail to match.  */
2764
2765  for (i = 0; i < noperands; i++)
2766    {
2767      const char *p;
2768      int c;
2769      char *end;
2770
2771      substed_operand[i] = recog_data.operand[i];
2772      p = constraints[i];
2773
2774      modified[i] = RELOAD_READ;
2775
2776      /* Scan this operand's constraint to see if it is an output operand,
2777	 an in-out operand, is commutative, or should match another.  */
2778
2779      while ((c = *p))
2780	{
2781	  p += CONSTRAINT_LEN (c, p);
2782	  switch (c)
2783	    {
2784	    case '=':
2785	      modified[i] = RELOAD_WRITE;
2786	      break;
2787	    case '+':
2788	      modified[i] = RELOAD_READ_WRITE;
2789	      break;
2790	    case '%':
2791	      {
2792		/* The last operand should not be marked commutative.  */
2793		gcc_assert (i != noperands - 1);
2794
2795		/* We currently only support one commutative pair of
2796		   operands.  Some existing asm code currently uses more
2797		   than one pair.  Previously, that would usually work,
2798		   but sometimes it would crash the compiler.  We
2799		   continue supporting that case as well as we can by
2800		   silently ignoring all but the first pair.  In the
2801		   future we may handle it correctly.  */
2802		if (commutative < 0)
2803		  commutative = i;
2804		else
2805		  gcc_assert (this_insn_is_asm);
2806	      }
2807	      break;
2808	    /* Use of ISDIGIT is tempting here, but it may get expensive because
2809	       of locale support we don't want.  */
2810	    case '0': case '1': case '2': case '3': case '4':
2811	    case '5': case '6': case '7': case '8': case '9':
2812	      {
2813		c = strtoul (p - 1, &end, 10);
2814		p = end;
2815
2816		operands_match[c][i]
2817		  = operands_match_p (recog_data.operand[c],
2818				      recog_data.operand[i]);
2819
2820		/* An operand may not match itself.  */
2821		gcc_assert (c != i);
2822
2823		/* If C can be commuted with C+1, and C might need to match I,
2824		   then C+1 might also need to match I.  */
2825		if (commutative >= 0)
2826		  {
2827		    if (c == commutative || c == commutative + 1)
2828		      {
2829			int other = c + (c == commutative ? 1 : -1);
2830			operands_match[other][i]
2831			  = operands_match_p (recog_data.operand[other],
2832					      recog_data.operand[i]);
2833		      }
2834		    if (i == commutative || i == commutative + 1)
2835		      {
2836			int other = i + (i == commutative ? 1 : -1);
2837			operands_match[c][other]
2838			  = operands_match_p (recog_data.operand[c],
2839					      recog_data.operand[other]);
2840		      }
2841		    /* Note that C is supposed to be less than I.
2842		       No need to consider altering both C and I because in
2843		       that case we would alter one into the other.  */
2844		  }
2845	      }
2846	    }
2847	}
2848    }
2849
2850  /* Examine each operand that is a memory reference or memory address
2851     and reload parts of the addresses into index registers.
2852     Also here any references to pseudo regs that didn't get hard regs
2853     but are equivalent to constants get replaced in the insn itself
2854     with those constants.  Nobody will ever see them again.
2855
2856     Finally, set up the preferred classes of each operand.  */
2857
2858  for (i = 0; i < noperands; i++)
2859    {
2860      RTX_CODE code = GET_CODE (recog_data.operand[i]);
2861
2862      address_reloaded[i] = 0;
2863      address_operand_reloaded[i] = 0;
2864      operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2865			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2866			 : RELOAD_OTHER);
2867      address_type[i]
2868	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2869	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2870	   : RELOAD_OTHER);
2871
2872      if (*constraints[i] == 0)
2873	/* Ignore things like match_operator operands.  */
2874	;
2875      else if (insn_extra_address_constraint
2876	       (lookup_constraint (constraints[i])))
2877	{
2878	  address_operand_reloaded[i]
2879	    = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2880				    recog_data.operand[i],
2881				    recog_data.operand_loc[i],
2882				    i, operand_type[i], ind_levels, insn);
2883
2884	  /* If we now have a simple operand where we used to have a
2885	     PLUS or MULT, re-recognize and try again.  */
2886	  if ((OBJECT_P (*recog_data.operand_loc[i])
2887	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2888	      && (GET_CODE (recog_data.operand[i]) == MULT
2889		  || GET_CODE (recog_data.operand[i]) == PLUS))
2890	    {
2891	      INSN_CODE (insn) = -1;
2892	      retval = find_reloads (insn, replace, ind_levels, live_known,
2893				     reload_reg_p);
2894	      return retval;
2895	    }
2896
2897	  recog_data.operand[i] = *recog_data.operand_loc[i];
2898	  substed_operand[i] = recog_data.operand[i];
2899
2900	  /* Address operands are reloaded in their existing mode,
2901	     no matter what is specified in the machine description.  */
2902	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
2903
2904	  /* If the address is a single CONST_INT pick address mode
2905	     instead otherwise we will later not know in which mode
2906	     the reload should be performed.  */
2907	  if (operand_mode[i] == VOIDmode)
2908	    operand_mode[i] = Pmode;
2909
2910	}
2911      else if (code == MEM)
2912	{
2913	  address_reloaded[i]
2914	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
2915				    recog_data.operand_loc[i],
2916				    XEXP (recog_data.operand[i], 0),
2917				    &XEXP (recog_data.operand[i], 0),
2918				    i, address_type[i], ind_levels, insn);
2919	  recog_data.operand[i] = *recog_data.operand_loc[i];
2920	  substed_operand[i] = recog_data.operand[i];
2921	}
2922      else if (code == SUBREG)
2923	{
2924	  rtx reg = SUBREG_REG (recog_data.operand[i]);
2925	  rtx op
2926	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2927				   ind_levels,
2928				   set != 0
2929				   && &SET_DEST (set) == recog_data.operand_loc[i],
2930				   insn,
2931				   &address_reloaded[i]);
2932
2933	  /* If we made a MEM to load (a part of) the stackslot of a pseudo
2934	     that didn't get a hard register, emit a USE with a REG_EQUAL
2935	     note in front so that we might inherit a previous, possibly
2936	     wider reload.  */
2937
2938	  if (replace
2939	      && MEM_P (op)
2940	      && REG_P (reg)
2941	      && (GET_MODE_SIZE (GET_MODE (reg))
2942		  >= GET_MODE_SIZE (GET_MODE (op)))
2943	      && reg_equiv_constant (REGNO (reg)) == 0)
2944	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2945						   insn),
2946				 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2947
2948	  substed_operand[i] = recog_data.operand[i] = op;
2949	}
2950      else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2951	/* We can get a PLUS as an "operand" as a result of register
2952	   elimination.  See eliminate_regs and gen_reload.  We handle
2953	   a unary operator by reloading the operand.  */
2954	substed_operand[i] = recog_data.operand[i]
2955	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2956				 ind_levels, 0, insn,
2957				 &address_reloaded[i]);
2958      else if (code == REG)
2959	{
2960	  /* This is equivalent to calling find_reloads_toplev.
2961	     The code is duplicated for speed.
2962	     When we find a pseudo always equivalent to a constant,
2963	     we replace it by the constant.  We must be sure, however,
2964	     that we don't try to replace it in the insn in which it
2965	     is being set.  */
2966	  int regno = REGNO (recog_data.operand[i]);
2967	  if (reg_equiv_constant (regno) != 0
2968	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2969	    {
2970	      /* Record the existing mode so that the check if constants are
2971		 allowed will work when operand_mode isn't specified.  */
2972
2973	      if (operand_mode[i] == VOIDmode)
2974		operand_mode[i] = GET_MODE (recog_data.operand[i]);
2975
2976	      substed_operand[i] = recog_data.operand[i]
2977		= reg_equiv_constant (regno);
2978	    }
2979	  if (reg_equiv_memory_loc (regno) != 0
2980	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2981	    /* We need not give a valid is_set_dest argument since the case
2982	       of a constant equivalence was checked above.  */
2983	    substed_operand[i] = recog_data.operand[i]
2984	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2985				     ind_levels, 0, insn,
2986				     &address_reloaded[i]);
2987	}
2988      /* If the operand is still a register (we didn't replace it with an
2989	 equivalent), get the preferred class to reload it into.  */
2990      code = GET_CODE (recog_data.operand[i]);
2991      preferred_class[i]
2992	= ((code == REG && REGNO (recog_data.operand[i])
2993	    >= FIRST_PSEUDO_REGISTER)
2994	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
2995	   : NO_REGS);
2996      pref_or_nothing[i]
2997	= (code == REG
2998	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2999	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
3000    }
3001
3002  /* If this is simply a copy from operand 1 to operand 0, merge the
3003     preferred classes for the operands.  */
3004  if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
3005      && recog_data.operand[1] == SET_SRC (set))
3006    {
3007      preferred_class[0] = preferred_class[1]
3008	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
3009      pref_or_nothing[0] |= pref_or_nothing[1];
3010      pref_or_nothing[1] |= pref_or_nothing[0];
3011    }
3012
3013  /* Now see what we need for pseudo-regs that didn't get hard regs
3014     or got the wrong kind of hard reg.  For this, we must consider
3015     all the operands together against the register constraints.  */
3016
3017  best = MAX_RECOG_OPERANDS * 2 + 600;
3018
3019  goal_alternative_swapped = 0;
3020
3021  /* The constraints are made of several alternatives.
3022     Each operand's constraint looks like foo,bar,... with commas
3023     separating the alternatives.  The first alternatives for all
3024     operands go together, the second alternatives go together, etc.
3025
3026     First loop over alternatives.  */
3027
3028  alternative_mask enabled = get_enabled_alternatives (insn);
3029  for (this_alternative_number = 0;
3030       this_alternative_number < n_alternatives;
3031       this_alternative_number++)
3032    {
3033      int swapped;
3034
3035      if (!TEST_BIT (enabled, this_alternative_number))
3036	{
3037	  int i;
3038
3039	  for (i = 0; i < recog_data.n_operands; i++)
3040	    constraints[i] = skip_alternative (constraints[i]);
3041
3042	  continue;
3043	}
3044
3045      /* If insn is commutative (it's safe to exchange a certain pair
3046	 of operands) then we need to try each alternative twice, the
3047	 second time matching those two operands as if we had
3048	 exchanged them.  To do this, really exchange them in
3049	 operands.  */
3050      for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3051	{
3052	  /* Loop over operands for one constraint alternative.  */
3053	  /* LOSERS counts those that don't fit this alternative
3054	     and would require loading.  */
3055	  int losers = 0;
3056	  /* BAD is set to 1 if it some operand can't fit this alternative
3057	     even after reloading.  */
3058	  int bad = 0;
3059	  /* REJECT is a count of how undesirable this alternative says it is
3060	     if any reloading is required.  If the alternative matches exactly
3061	     then REJECT is ignored, but otherwise it gets this much
3062	     counted against it in addition to the reloading needed.  Each
3063	     ? counts three times here since we want the disparaging caused by
3064	     a bad register class to only count 1/3 as much.  */
3065	  int reject = 0;
3066
3067	  if (swapped)
3068	    {
3069	      enum reg_class tclass;
3070	      int t;
3071
3072	      recog_data.operand[commutative] = substed_operand[commutative + 1];
3073	      recog_data.operand[commutative + 1] = substed_operand[commutative];
3074	      /* Swap the duplicates too.  */
3075	      for (i = 0; i < recog_data.n_dups; i++)
3076		if (recog_data.dup_num[i] == commutative
3077		    || recog_data.dup_num[i] == commutative + 1)
3078		  *recog_data.dup_loc[i]
3079		    = recog_data.operand[(int) recog_data.dup_num[i]];
3080
3081	      tclass = preferred_class[commutative];
3082	      preferred_class[commutative] = preferred_class[commutative + 1];
3083	      preferred_class[commutative + 1] = tclass;
3084
3085	      t = pref_or_nothing[commutative];
3086	      pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3087	      pref_or_nothing[commutative + 1] = t;
3088
3089	      t = address_reloaded[commutative];
3090	      address_reloaded[commutative] = address_reloaded[commutative + 1];
3091	      address_reloaded[commutative + 1] = t;
3092	    }
3093
3094	  this_earlyclobber = 0;
3095
3096	  for (i = 0; i < noperands; i++)
3097	    {
3098	      const char *p = constraints[i];
3099	      char *end;
3100	      int len;
3101	      int win = 0;
3102	      int did_match = 0;
3103	      /* 0 => this operand can be reloaded somehow for this alternative.  */
3104	      int badop = 1;
3105	      /* 0 => this operand can be reloaded if the alternative allows regs.  */
3106	      int winreg = 0;
3107	      int c;
3108	      int m;
3109	      rtx operand = recog_data.operand[i];
3110	      int offset = 0;
3111	      /* Nonzero means this is a MEM that must be reloaded into a reg
3112		 regardless of what the constraint says.  */
3113	      int force_reload = 0;
3114	      int offmemok = 0;
3115	      /* Nonzero if a constant forced into memory would be OK for this
3116		 operand.  */
3117	      int constmemok = 0;
3118	      int earlyclobber = 0;
3119	      enum constraint_num cn;
3120	      enum reg_class cl;
3121
3122	      /* If the predicate accepts a unary operator, it means that
3123		 we need to reload the operand, but do not do this for
3124		 match_operator and friends.  */
3125	      if (UNARY_P (operand) && *p != 0)
3126		operand = XEXP (operand, 0);
3127
3128	      /* If the operand is a SUBREG, extract
3129		 the REG or MEM (or maybe even a constant) within.
3130		 (Constants can occur as a result of reg_equiv_constant.)  */
3131
3132	      while (GET_CODE (operand) == SUBREG)
3133		{
3134		  /* Offset only matters when operand is a REG and
3135		     it is a hard reg.  This is because it is passed
3136		     to reg_fits_class_p if it is a REG and all pseudos
3137		     return 0 from that function.  */
3138		  if (REG_P (SUBREG_REG (operand))
3139		      && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3140		    {
3141		      if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3142						 GET_MODE (SUBREG_REG (operand)),
3143						 SUBREG_BYTE (operand),
3144						 GET_MODE (operand)) < 0)
3145			force_reload = 1;
3146		      offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3147						     GET_MODE (SUBREG_REG (operand)),
3148						     SUBREG_BYTE (operand),
3149						     GET_MODE (operand));
3150		    }
3151		  operand = SUBREG_REG (operand);
3152		  /* Force reload if this is a constant or PLUS or if there may
3153		     be a problem accessing OPERAND in the outer mode.  */
3154		  if (CONSTANT_P (operand)
3155		      || GET_CODE (operand) == PLUS
3156		      /* We must force a reload of paradoxical SUBREGs
3157			 of a MEM because the alignment of the inner value
3158			 may not be enough to do the outer reference.  On
3159			 big-endian machines, it may also reference outside
3160			 the object.
3161
3162			 On machines that extend byte operations and we have a
3163			 SUBREG where both the inner and outer modes are no wider
3164			 than a word and the inner mode is narrower, is integral,
3165			 and gets extended when loaded from memory, combine.c has
3166			 made assumptions about the behavior of the machine in such
3167			 register access.  If the data is, in fact, in memory we
3168			 must always load using the size assumed to be in the
3169			 register and let the insn do the different-sized
3170			 accesses.
3171
3172			 This is doubly true if WORD_REGISTER_OPERATIONS.  In
3173			 this case eliminate_regs has left non-paradoxical
3174			 subregs for push_reload to see.  Make sure it does
3175			 by forcing the reload.
3176
3177			 ??? When is it right at this stage to have a subreg
3178			 of a mem that is _not_ to be handled specially?  IMO
3179			 those should have been reduced to just a mem.  */
3180		      || ((MEM_P (operand)
3181			   || (REG_P (operand)
3182			       && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3183#ifndef WORD_REGISTER_OPERATIONS
3184			  && (((GET_MODE_BITSIZE (GET_MODE (operand))
3185				< BIGGEST_ALIGNMENT)
3186			       && (GET_MODE_SIZE (operand_mode[i])
3187				   > GET_MODE_SIZE (GET_MODE (operand))))
3188			      || BYTES_BIG_ENDIAN
3189#ifdef LOAD_EXTEND_OP
3190			      || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3191				  && (GET_MODE_SIZE (GET_MODE (operand))
3192				      <= UNITS_PER_WORD)
3193				  && (GET_MODE_SIZE (operand_mode[i])
3194				      > GET_MODE_SIZE (GET_MODE (operand)))
3195				  && INTEGRAL_MODE_P (GET_MODE (operand))
3196				  && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3197#endif
3198			      )
3199#endif
3200			  )
3201		      )
3202		    force_reload = 1;
3203		}
3204
3205	      this_alternative[i] = NO_REGS;
3206	      this_alternative_win[i] = 0;
3207	      this_alternative_match_win[i] = 0;
3208	      this_alternative_offmemok[i] = 0;
3209	      this_alternative_earlyclobber[i] = 0;
3210	      this_alternative_matches[i] = -1;
3211
3212	      /* An empty constraint or empty alternative
3213		 allows anything which matched the pattern.  */
3214	      if (*p == 0 || *p == ',')
3215		win = 1, badop = 0;
3216
3217	      /* Scan this alternative's specs for this operand;
3218		 set WIN if the operand fits any letter in this alternative.
3219		 Otherwise, clear BADOP if this operand could
3220		 fit some letter after reloads,
3221		 or set WINREG if this operand could fit after reloads
3222		 provided the constraint allows some registers.  */
3223
3224	      do
3225		switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3226		  {
3227		  case '\0':
3228		    len = 0;
3229		    break;
3230		  case ',':
3231		    c = '\0';
3232		    break;
3233
3234		  case '?':
3235		    reject += 6;
3236		    break;
3237
3238		  case '!':
3239		    reject = 600;
3240		    break;
3241
3242		  case '#':
3243		    /* Ignore rest of this alternative as far as
3244		       reloading is concerned.  */
3245		    do
3246		      p++;
3247		    while (*p && *p != ',');
3248		    len = 0;
3249		    break;
3250
3251		  case '0':  case '1':  case '2':  case '3':  case '4':
3252		  case '5':  case '6':  case '7':  case '8':  case '9':
3253		    m = strtoul (p, &end, 10);
3254		    p = end;
3255		    len = 0;
3256
3257		    this_alternative_matches[i] = m;
3258		    /* We are supposed to match a previous operand.
3259		       If we do, we win if that one did.
3260		       If we do not, count both of the operands as losers.
3261		       (This is too conservative, since most of the time
3262		       only a single reload insn will be needed to make
3263		       the two operands win.  As a result, this alternative
3264		       may be rejected when it is actually desirable.)  */
3265		    if ((swapped && (m != commutative || i != commutative + 1))
3266			/* If we are matching as if two operands were swapped,
3267			   also pretend that operands_match had been computed
3268			   with swapped.
3269			   But if I is the second of those and C is the first,
3270			   don't exchange them, because operands_match is valid
3271			   only on one side of its diagonal.  */
3272			? (operands_match
3273			   [(m == commutative || m == commutative + 1)
3274			    ? 2 * commutative + 1 - m : m]
3275			   [(i == commutative || i == commutative + 1)
3276			    ? 2 * commutative + 1 - i : i])
3277			: operands_match[m][i])
3278		      {
3279			/* If we are matching a non-offsettable address where an
3280			   offsettable address was expected, then we must reject
3281			   this combination, because we can't reload it.  */
3282			if (this_alternative_offmemok[m]
3283			    && MEM_P (recog_data.operand[m])
3284			    && this_alternative[m] == NO_REGS
3285			    && ! this_alternative_win[m])
3286			  bad = 1;
3287
3288			did_match = this_alternative_win[m];
3289		      }
3290		    else
3291		      {
3292			/* Operands don't match.  */
3293			rtx value;
3294			int loc1, loc2;
3295			/* Retroactively mark the operand we had to match
3296			   as a loser, if it wasn't already.  */
3297			if (this_alternative_win[m])
3298			  losers++;
3299			this_alternative_win[m] = 0;
3300			if (this_alternative[m] == NO_REGS)
3301			  bad = 1;
3302			/* But count the pair only once in the total badness of
3303			   this alternative, if the pair can be a dummy reload.
3304			   The pointers in operand_loc are not swapped; swap
3305			   them by hand if necessary.  */
3306			if (swapped && i == commutative)
3307			  loc1 = commutative + 1;
3308			else if (swapped && i == commutative + 1)
3309			  loc1 = commutative;
3310			else
3311			  loc1 = i;
3312			if (swapped && m == commutative)
3313			  loc2 = commutative + 1;
3314			else if (swapped && m == commutative + 1)
3315			  loc2 = commutative;
3316			else
3317			  loc2 = m;
3318			value
3319			  = find_dummy_reload (recog_data.operand[i],
3320					       recog_data.operand[m],
3321					       recog_data.operand_loc[loc1],
3322					       recog_data.operand_loc[loc2],
3323					       operand_mode[i], operand_mode[m],
3324					       this_alternative[m], -1,
3325					       this_alternative_earlyclobber[m]);
3326
3327			if (value != 0)
3328			  losers--;
3329		      }
3330		    /* This can be fixed with reloads if the operand
3331		       we are supposed to match can be fixed with reloads.  */
3332		    badop = 0;
3333		    this_alternative[i] = this_alternative[m];
3334
3335		    /* If we have to reload this operand and some previous
3336		       operand also had to match the same thing as this
3337		       operand, we don't know how to do that.  So reject this
3338		       alternative.  */
3339		    if (! did_match || force_reload)
3340		      for (j = 0; j < i; j++)
3341			if (this_alternative_matches[j]
3342			    == this_alternative_matches[i])
3343			  {
3344			    badop = 1;
3345			    break;
3346			  }
3347		    break;
3348
3349		  case 'p':
3350		    /* All necessary reloads for an address_operand
3351		       were handled in find_reloads_address.  */
3352		    this_alternative[i]
3353		      = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3354					ADDRESS, SCRATCH);
3355		    win = 1;
3356		    badop = 0;
3357		    break;
3358
3359		  case TARGET_MEM_CONSTRAINT:
3360		    if (force_reload)
3361		      break;
3362		    if (MEM_P (operand)
3363			|| (REG_P (operand)
3364			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3365			    && reg_renumber[REGNO (operand)] < 0))
3366		      win = 1;
3367		    if (CONST_POOL_OK_P (operand_mode[i], operand))
3368		      badop = 0;
3369		    constmemok = 1;
3370		    break;
3371
3372		  case '<':
3373		    if (MEM_P (operand)
3374			&& ! address_reloaded[i]
3375			&& (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3376			    || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3377		      win = 1;
3378		    break;
3379
3380		  case '>':
3381		    if (MEM_P (operand)
3382			&& ! address_reloaded[i]
3383			&& (GET_CODE (XEXP (operand, 0)) == PRE_INC
3384			    || GET_CODE (XEXP (operand, 0)) == POST_INC))
3385		      win = 1;
3386		    break;
3387
3388		    /* Memory operand whose address is not offsettable.  */
3389		  case 'V':
3390		    if (force_reload)
3391		      break;
3392		    if (MEM_P (operand)
3393			&& ! (ind_levels ? offsettable_memref_p (operand)
3394			      : offsettable_nonstrict_memref_p (operand))
3395			/* Certain mem addresses will become offsettable
3396			   after they themselves are reloaded.  This is important;
3397			   we don't want our own handling of unoffsettables
3398			   to override the handling of reg_equiv_address.  */
3399			&& !(REG_P (XEXP (operand, 0))
3400			     && (ind_levels == 0
3401				 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3402		      win = 1;
3403		    break;
3404
3405		    /* Memory operand whose address is offsettable.  */
3406		  case 'o':
3407		    if (force_reload)
3408		      break;
3409		    if ((MEM_P (operand)
3410			 /* If IND_LEVELS, find_reloads_address won't reload a
3411			    pseudo that didn't get a hard reg, so we have to
3412			    reject that case.  */
3413			 && ((ind_levels ? offsettable_memref_p (operand)
3414			      : offsettable_nonstrict_memref_p (operand))
3415			     /* A reloaded address is offsettable because it is now
3416				just a simple register indirect.  */
3417			     || address_reloaded[i] == 1))
3418			|| (REG_P (operand)
3419			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3420			    && reg_renumber[REGNO (operand)] < 0
3421			    /* If reg_equiv_address is nonzero, we will be
3422			       loading it into a register; hence it will be
3423			       offsettable, but we cannot say that reg_equiv_mem
3424			       is offsettable without checking.  */
3425			    && ((reg_equiv_mem (REGNO (operand)) != 0
3426				 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3427				|| (reg_equiv_address (REGNO (operand)) != 0))))
3428		      win = 1;
3429		    if (CONST_POOL_OK_P (operand_mode[i], operand)
3430			|| MEM_P (operand))
3431		      badop = 0;
3432		    constmemok = 1;
3433		    offmemok = 1;
3434		    break;
3435
3436		  case '&':
3437		    /* Output operand that is stored before the need for the
3438		       input operands (and their index registers) is over.  */
3439		    earlyclobber = 1, this_earlyclobber = 1;
3440		    break;
3441
3442		  case 'X':
3443		    force_reload = 0;
3444		    win = 1;
3445		    break;
3446
3447		  case 'g':
3448		    if (! force_reload
3449			/* A PLUS is never a valid operand, but reload can make
3450			   it from a register when eliminating registers.  */
3451			&& GET_CODE (operand) != PLUS
3452			/* A SCRATCH is not a valid operand.  */
3453			&& GET_CODE (operand) != SCRATCH
3454			&& (! CONSTANT_P (operand)
3455			    || ! flag_pic
3456			    || LEGITIMATE_PIC_OPERAND_P (operand))
3457			&& (GENERAL_REGS == ALL_REGS
3458			    || !REG_P (operand)
3459			    || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3460				&& reg_renumber[REGNO (operand)] < 0)))
3461		      win = 1;
3462		    cl = GENERAL_REGS;
3463		    goto reg;
3464
3465		  default:
3466		    cn = lookup_constraint (p);
3467		    switch (get_constraint_type (cn))
3468		      {
3469		      case CT_REGISTER:
3470			cl = reg_class_for_constraint (cn);
3471			if (cl != NO_REGS)
3472			  goto reg;
3473			break;
3474
3475		      case CT_CONST_INT:
3476			if (CONST_INT_P (operand)
3477			    && (insn_const_int_ok_for_constraint
3478				(INTVAL (operand), cn)))
3479			  win = true;
3480			break;
3481
3482		      case CT_MEMORY:
3483			if (force_reload)
3484			  break;
3485			if (constraint_satisfied_p (operand, cn))
3486			  win = 1;
3487			/* If the address was already reloaded,
3488			   we win as well.  */
3489			else if (MEM_P (operand) && address_reloaded[i] == 1)
3490			  win = 1;
3491			/* Likewise if the address will be reloaded because
3492			   reg_equiv_address is nonzero.  For reg_equiv_mem
3493			   we have to check.  */
3494			else if (REG_P (operand)
3495				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3496				 && reg_renumber[REGNO (operand)] < 0
3497				 && ((reg_equiv_mem (REGNO (operand)) != 0
3498				      && (constraint_satisfied_p
3499					  (reg_equiv_mem (REGNO (operand)),
3500					   cn)))
3501				     || (reg_equiv_address (REGNO (operand))
3502					 != 0)))
3503			  win = 1;
3504
3505			/* If we didn't already win, we can reload
3506			   constants via force_const_mem, and other
3507			   MEMs by reloading the address like for 'o'.  */
3508			if (CONST_POOL_OK_P (operand_mode[i], operand)
3509			    || MEM_P (operand))
3510			  badop = 0;
3511			constmemok = 1;
3512			offmemok = 1;
3513			break;
3514
3515		      case CT_ADDRESS:
3516			if (constraint_satisfied_p (operand, cn))
3517			  win = 1;
3518
3519			/* If we didn't already win, we can reload
3520			   the address into a base register.  */
3521			this_alternative[i]
3522			  = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3523					    ADDRESS, SCRATCH);
3524			badop = 0;
3525			break;
3526
3527		      case CT_FIXED_FORM:
3528			if (constraint_satisfied_p (operand, cn))
3529			  win = 1;
3530			break;
3531		      }
3532		    break;
3533
3534		  reg:
3535		    this_alternative[i]
3536		      = reg_class_subunion[this_alternative[i]][cl];
3537		    if (GET_MODE (operand) == BLKmode)
3538		      break;
3539		    winreg = 1;
3540		    if (REG_P (operand)
3541			&& reg_fits_class_p (operand, this_alternative[i],
3542  			             offset, GET_MODE (recog_data.operand[i])))
3543		      win = 1;
3544		    break;
3545		  }
3546	      while ((p += len), c);
3547
3548	      if (swapped == (commutative >= 0 ? 1 : 0))
3549		constraints[i] = p;
3550
3551	      /* If this operand could be handled with a reg,
3552		 and some reg is allowed, then this operand can be handled.  */
3553	      if (winreg && this_alternative[i] != NO_REGS
3554		  && (win || !class_only_fixed_regs[this_alternative[i]]))
3555		badop = 0;
3556
3557	      /* Record which operands fit this alternative.  */
3558	      this_alternative_earlyclobber[i] = earlyclobber;
3559	      if (win && ! force_reload)
3560		this_alternative_win[i] = 1;
3561	      else if (did_match && ! force_reload)
3562		this_alternative_match_win[i] = 1;
3563	      else
3564		{
3565		  int const_to_mem = 0;
3566
3567		  this_alternative_offmemok[i] = offmemok;
3568		  losers++;
3569		  if (badop)
3570		    bad = 1;
3571		  /* Alternative loses if it has no regs for a reg operand.  */
3572		  if (REG_P (operand)
3573		      && this_alternative[i] == NO_REGS
3574		      && this_alternative_matches[i] < 0)
3575		    bad = 1;
3576
3577		  /* If this is a constant that is reloaded into the desired
3578		     class by copying it to memory first, count that as another
3579		     reload.  This is consistent with other code and is
3580		     required to avoid choosing another alternative when
3581		     the constant is moved into memory by this function on
3582		     an early reload pass.  Note that the test here is
3583		     precisely the same as in the code below that calls
3584		     force_const_mem.  */
3585		  if (CONST_POOL_OK_P (operand_mode[i], operand)
3586		      && ((targetm.preferred_reload_class (operand,
3587							   this_alternative[i])
3588			   == NO_REGS)
3589			  || no_input_reloads))
3590		    {
3591		      const_to_mem = 1;
3592		      if (this_alternative[i] != NO_REGS)
3593			losers++;
3594		    }
3595
3596		  /* Alternative loses if it requires a type of reload not
3597		     permitted for this insn.  We can always reload SCRATCH
3598		     and objects with a REG_UNUSED note.  */
3599		  if (GET_CODE (operand) != SCRATCH
3600		      && modified[i] != RELOAD_READ && no_output_reloads
3601		      && ! find_reg_note (insn, REG_UNUSED, operand))
3602		    bad = 1;
3603		  else if (modified[i] != RELOAD_WRITE && no_input_reloads
3604			   && ! const_to_mem)
3605		    bad = 1;
3606
3607		  /* If we can't reload this value at all, reject this
3608		     alternative.  Note that we could also lose due to
3609		     LIMIT_RELOAD_CLASS, but we don't check that
3610		     here.  */
3611
3612		  if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3613		    {
3614		      if (targetm.preferred_reload_class (operand,
3615							  this_alternative[i])
3616			  == NO_REGS)
3617			reject = 600;
3618
3619		      if (operand_type[i] == RELOAD_FOR_OUTPUT
3620			  && (targetm.preferred_output_reload_class (operand,
3621							    this_alternative[i])
3622			      == NO_REGS))
3623			reject = 600;
3624		    }
3625
3626		  /* We prefer to reload pseudos over reloading other things,
3627		     since such reloads may be able to be eliminated later.
3628		     If we are reloading a SCRATCH, we won't be generating any
3629		     insns, just using a register, so it is also preferred.
3630		     So bump REJECT in other cases.  Don't do this in the
3631		     case where we are forcing a constant into memory and
3632		     it will then win since we don't want to have a different
3633		     alternative match then.  */
3634		  if (! (REG_P (operand)
3635			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3636		      && GET_CODE (operand) != SCRATCH
3637		      && ! (const_to_mem && constmemok))
3638		    reject += 2;
3639
3640		  /* Input reloads can be inherited more often than output
3641		     reloads can be removed, so penalize output reloads.  */
3642		  if (operand_type[i] != RELOAD_FOR_INPUT
3643		      && GET_CODE (operand) != SCRATCH)
3644		    reject++;
3645		}
3646
3647	      /* If this operand is a pseudo register that didn't get
3648		 a hard reg and this alternative accepts some
3649		 register, see if the class that we want is a subset
3650		 of the preferred class for this register.  If not,
3651		 but it intersects that class, use the preferred class
3652		 instead.  If it does not intersect the preferred
3653		 class, show that usage of this alternative should be
3654		 discouraged; it will be discouraged more still if the
3655		 register is `preferred or nothing'.  We do this
3656		 because it increases the chance of reusing our spill
3657		 register in a later insn and avoiding a pair of
3658		 memory stores and loads.
3659
3660		 Don't bother with this if this alternative will
3661		 accept this operand.
3662
3663		 Don't do this for a multiword operand, since it is
3664		 only a small win and has the risk of requiring more
3665		 spill registers, which could cause a large loss.
3666
3667		 Don't do this if the preferred class has only one
3668		 register because we might otherwise exhaust the
3669		 class.  */
3670
3671	      if (! win && ! did_match
3672		  && this_alternative[i] != NO_REGS
3673		  && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3674		  && reg_class_size [(int) preferred_class[i]] > 0
3675		  && ! small_register_class_p (preferred_class[i]))
3676		{
3677		  if (! reg_class_subset_p (this_alternative[i],
3678					    preferred_class[i]))
3679		    {
3680		      /* Since we don't have a way of forming the intersection,
3681			 we just do something special if the preferred class
3682			 is a subset of the class we have; that's the most
3683			 common case anyway.  */
3684		      if (reg_class_subset_p (preferred_class[i],
3685					      this_alternative[i]))
3686			this_alternative[i] = preferred_class[i];
3687		      else
3688			reject += (2 + 2 * pref_or_nothing[i]);
3689		    }
3690		}
3691	    }
3692
3693	  /* Now see if any output operands that are marked "earlyclobber"
3694	     in this alternative conflict with any input operands
3695	     or any memory addresses.  */
3696
3697	  for (i = 0; i < noperands; i++)
3698	    if (this_alternative_earlyclobber[i]
3699		&& (this_alternative_win[i] || this_alternative_match_win[i]))
3700	      {
3701		struct decomposition early_data;
3702
3703		early_data = decompose (recog_data.operand[i]);
3704
3705		gcc_assert (modified[i] != RELOAD_READ);
3706
3707		if (this_alternative[i] == NO_REGS)
3708		  {
3709		    this_alternative_earlyclobber[i] = 0;
3710		    gcc_assert (this_insn_is_asm);
3711		    error_for_asm (this_insn,
3712			      "%<&%> constraint used with no register class");
3713		  }
3714
3715		for (j = 0; j < noperands; j++)
3716		  /* Is this an input operand or a memory ref?  */
3717		  if ((MEM_P (recog_data.operand[j])
3718		       || modified[j] != RELOAD_WRITE)
3719		      && j != i
3720		      /* Ignore things like match_operator operands.  */
3721		      && !recog_data.is_operator[j]
3722		      /* Don't count an input operand that is constrained to match
3723			 the early clobber operand.  */
3724		      && ! (this_alternative_matches[j] == i
3725			    && rtx_equal_p (recog_data.operand[i],
3726					    recog_data.operand[j]))
3727		      /* Is it altered by storing the earlyclobber operand?  */
3728		      && !immune_p (recog_data.operand[j], recog_data.operand[i],
3729				    early_data))
3730		    {
3731		      /* If the output is in a non-empty few-regs class,
3732			 it's costly to reload it, so reload the input instead.  */
3733		      if (small_register_class_p (this_alternative[i])
3734			  && (REG_P (recog_data.operand[j])
3735			      || GET_CODE (recog_data.operand[j]) == SUBREG))
3736			{
3737			  losers++;
3738			  this_alternative_win[j] = 0;
3739			  this_alternative_match_win[j] = 0;
3740			}
3741		      else
3742			break;
3743		    }
3744		/* If an earlyclobber operand conflicts with something,
3745		   it must be reloaded, so request this and count the cost.  */
3746		if (j != noperands)
3747		  {
3748		    losers++;
3749		    this_alternative_win[i] = 0;
3750		    this_alternative_match_win[j] = 0;
3751		    for (j = 0; j < noperands; j++)
3752		      if (this_alternative_matches[j] == i
3753			  && this_alternative_match_win[j])
3754			{
3755			  this_alternative_win[j] = 0;
3756			  this_alternative_match_win[j] = 0;
3757			  losers++;
3758			}
3759		  }
3760	      }
3761
3762	  /* If one alternative accepts all the operands, no reload required,
3763	     choose that alternative; don't consider the remaining ones.  */
3764	  if (losers == 0)
3765	    {
3766	      /* Unswap these so that they are never swapped at `finish'.  */
3767	      if (swapped)
3768		{
3769		  recog_data.operand[commutative] = substed_operand[commutative];
3770		  recog_data.operand[commutative + 1]
3771		    = substed_operand[commutative + 1];
3772		}
3773	      for (i = 0; i < noperands; i++)
3774		{
3775		  goal_alternative_win[i] = this_alternative_win[i];
3776		  goal_alternative_match_win[i] = this_alternative_match_win[i];
3777		  goal_alternative[i] = this_alternative[i];
3778		  goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3779		  goal_alternative_matches[i] = this_alternative_matches[i];
3780		  goal_alternative_earlyclobber[i]
3781		    = this_alternative_earlyclobber[i];
3782		}
3783	      goal_alternative_number = this_alternative_number;
3784	      goal_alternative_swapped = swapped;
3785	      goal_earlyclobber = this_earlyclobber;
3786	      goto finish;
3787	    }
3788
3789	  /* REJECT, set by the ! and ? constraint characters and when a register
3790	     would be reloaded into a non-preferred class, discourages the use of
3791	     this alternative for a reload goal.  REJECT is incremented by six
3792	     for each ? and two for each non-preferred class.  */
3793	  losers = losers * 6 + reject;
3794
3795	  /* If this alternative can be made to work by reloading,
3796	     and it needs less reloading than the others checked so far,
3797	     record it as the chosen goal for reloading.  */
3798	  if (! bad)
3799	    {
3800	      if (best > losers)
3801		{
3802		  for (i = 0; i < noperands; i++)
3803		    {
3804		      goal_alternative[i] = this_alternative[i];
3805		      goal_alternative_win[i] = this_alternative_win[i];
3806		      goal_alternative_match_win[i]
3807			= this_alternative_match_win[i];
3808		      goal_alternative_offmemok[i]
3809			= this_alternative_offmemok[i];
3810		      goal_alternative_matches[i] = this_alternative_matches[i];
3811		      goal_alternative_earlyclobber[i]
3812			= this_alternative_earlyclobber[i];
3813		    }
3814		  goal_alternative_swapped = swapped;
3815		  best = losers;
3816		  goal_alternative_number = this_alternative_number;
3817		  goal_earlyclobber = this_earlyclobber;
3818		}
3819	    }
3820
3821	  if (swapped)
3822	    {
3823	      enum reg_class tclass;
3824	      int t;
3825
3826	      /* If the commutative operands have been swapped, swap
3827		 them back in order to check the next alternative.  */
3828	      recog_data.operand[commutative] = substed_operand[commutative];
3829	      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3830	      /* Unswap the duplicates too.  */
3831	      for (i = 0; i < recog_data.n_dups; i++)
3832		if (recog_data.dup_num[i] == commutative
3833		    || recog_data.dup_num[i] == commutative + 1)
3834		  *recog_data.dup_loc[i]
3835		    = recog_data.operand[(int) recog_data.dup_num[i]];
3836
3837	      /* Unswap the operand related information as well.  */
3838	      tclass = preferred_class[commutative];
3839	      preferred_class[commutative] = preferred_class[commutative + 1];
3840	      preferred_class[commutative + 1] = tclass;
3841
3842	      t = pref_or_nothing[commutative];
3843	      pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3844	      pref_or_nothing[commutative + 1] = t;
3845
3846	      t = address_reloaded[commutative];
3847	      address_reloaded[commutative] = address_reloaded[commutative + 1];
3848	      address_reloaded[commutative + 1] = t;
3849	    }
3850	}
3851    }
3852
3853  /* The operands don't meet the constraints.
3854     goal_alternative describes the alternative
3855     that we could reach by reloading the fewest operands.
3856     Reload so as to fit it.  */
3857
3858  if (best == MAX_RECOG_OPERANDS * 2 + 600)
3859    {
3860      /* No alternative works with reloads??  */
3861      if (insn_code_number >= 0)
3862	fatal_insn ("unable to generate reloads for:", insn);
3863      error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3864      /* Avoid further trouble with this insn.  */
3865      PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3866      n_reloads = 0;
3867      return 0;
3868    }
3869
3870  /* Jump to `finish' from above if all operands are valid already.
3871     In that case, goal_alternative_win is all 1.  */
3872 finish:
3873
3874  /* Right now, for any pair of operands I and J that are required to match,
3875     with I < J,
3876     goal_alternative_matches[J] is I.
3877     Set up goal_alternative_matched as the inverse function:
3878     goal_alternative_matched[I] = J.  */
3879
3880  for (i = 0; i < noperands; i++)
3881    goal_alternative_matched[i] = -1;
3882
3883  for (i = 0; i < noperands; i++)
3884    if (! goal_alternative_win[i]
3885	&& goal_alternative_matches[i] >= 0)
3886      goal_alternative_matched[goal_alternative_matches[i]] = i;
3887
3888  for (i = 0; i < noperands; i++)
3889    goal_alternative_win[i] |= goal_alternative_match_win[i];
3890
3891  /* If the best alternative is with operands 1 and 2 swapped,
3892     consider them swapped before reporting the reloads.  Update the
3893     operand numbers of any reloads already pushed.  */
3894
3895  if (goal_alternative_swapped)
3896    {
3897      rtx tem;
3898
3899      tem = substed_operand[commutative];
3900      substed_operand[commutative] = substed_operand[commutative + 1];
3901      substed_operand[commutative + 1] = tem;
3902      tem = recog_data.operand[commutative];
3903      recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3904      recog_data.operand[commutative + 1] = tem;
3905      tem = *recog_data.operand_loc[commutative];
3906      *recog_data.operand_loc[commutative]
3907	= *recog_data.operand_loc[commutative + 1];
3908      *recog_data.operand_loc[commutative + 1] = tem;
3909
3910      for (i = 0; i < n_reloads; i++)
3911	{
3912	  if (rld[i].opnum == commutative)
3913	    rld[i].opnum = commutative + 1;
3914	  else if (rld[i].opnum == commutative + 1)
3915	    rld[i].opnum = commutative;
3916	}
3917    }
3918
3919  for (i = 0; i < noperands; i++)
3920    {
3921      operand_reloadnum[i] = -1;
3922
3923      /* If this is an earlyclobber operand, we need to widen the scope.
3924	 The reload must remain valid from the start of the insn being
3925	 reloaded until after the operand is stored into its destination.
3926	 We approximate this with RELOAD_OTHER even though we know that we
3927	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3928
3929	 One special case that is worth checking is when we have an
3930	 output that is earlyclobber but isn't used past the insn (typically
3931	 a SCRATCH).  In this case, we only need have the reload live
3932	 through the insn itself, but not for any of our input or output
3933	 reloads.
3934	 But we must not accidentally narrow the scope of an existing
3935	 RELOAD_OTHER reload - leave these alone.
3936
3937	 In any case, anything needed to address this operand can remain
3938	 however they were previously categorized.  */
3939
3940      if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3941	operand_type[i]
3942	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3943	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
3944    }
3945
3946  /* Any constants that aren't allowed and can't be reloaded
3947     into registers are here changed into memory references.  */
3948  for (i = 0; i < noperands; i++)
3949    if (! goal_alternative_win[i])
3950      {
3951	rtx op = recog_data.operand[i];
3952	rtx subreg = NULL_RTX;
3953	rtx plus = NULL_RTX;
3954	machine_mode mode = operand_mode[i];
3955
3956	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3957	   push_reload so we have to let them pass here.  */
3958	if (GET_CODE (op) == SUBREG)
3959	  {
3960	    subreg = op;
3961	    op = SUBREG_REG (op);
3962	    mode = GET_MODE (op);
3963	  }
3964
3965	if (GET_CODE (op) == PLUS)
3966	  {
3967	    plus = op;
3968	    op = XEXP (op, 1);
3969	  }
3970
3971	if (CONST_POOL_OK_P (mode, op)
3972	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
3973		 == NO_REGS)
3974		|| no_input_reloads))
3975	  {
3976	    int this_address_reloaded;
3977	    rtx tem = force_const_mem (mode, op);
3978
3979	    /* If we stripped a SUBREG or a PLUS above add it back.  */
3980	    if (plus != NULL_RTX)
3981	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3982
3983	    if (subreg != NULL_RTX)
3984	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3985
3986	    this_address_reloaded = 0;
3987	    substed_operand[i] = recog_data.operand[i]
3988	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3989				     0, insn, &this_address_reloaded);
3990
3991	    /* If the alternative accepts constant pool refs directly
3992	       there will be no reload needed at all.  */
3993	    if (plus == NULL_RTX
3994		&& subreg == NULL_RTX
3995		&& alternative_allows_const_pool_ref (this_address_reloaded == 0
3996						      ? substed_operand[i]
3997						      : NULL,
3998						      recog_data.constraints[i],
3999						      goal_alternative_number))
4000	      goal_alternative_win[i] = 1;
4001	  }
4002      }
4003
4004  /* Record the values of the earlyclobber operands for the caller.  */
4005  if (goal_earlyclobber)
4006    for (i = 0; i < noperands; i++)
4007      if (goal_alternative_earlyclobber[i])
4008	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
4009
4010  /* Now record reloads for all the operands that need them.  */
4011  for (i = 0; i < noperands; i++)
4012    if (! goal_alternative_win[i])
4013      {
4014	/* Operands that match previous ones have already been handled.  */
4015	if (goal_alternative_matches[i] >= 0)
4016	  ;
4017	/* Handle an operand with a nonoffsettable address
4018	   appearing where an offsettable address will do
4019	   by reloading the address into a base register.
4020
4021	   ??? We can also do this when the operand is a register and
4022	   reg_equiv_mem is not offsettable, but this is a bit tricky,
4023	   so we don't bother with it.  It may not be worth doing.  */
4024	else if (goal_alternative_matched[i] == -1
4025		 && goal_alternative_offmemok[i]
4026		 && MEM_P (recog_data.operand[i]))
4027	  {
4028	    /* If the address to be reloaded is a VOIDmode constant,
4029	       use the default address mode as mode of the reload register,
4030	       as would have been done by find_reloads_address.  */
4031	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4032	    machine_mode address_mode;
4033
4034	    address_mode = get_address_mode (recog_data.operand[i]);
4035	    operand_reloadnum[i]
4036	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4037			     &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4038			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
4039			     address_mode,
4040			     VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4041	    rld[operand_reloadnum[i]].inc
4042	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4043
4044	    /* If this operand is an output, we will have made any
4045	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4046	       now we are treating part of the operand as an input, so
4047	       we must change these to RELOAD_FOR_INPUT_ADDRESS.  */
4048
4049	    if (modified[i] == RELOAD_WRITE)
4050	      {
4051		for (j = 0; j < n_reloads; j++)
4052		  {
4053		    if (rld[j].opnum == i)
4054		      {
4055			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4056			  rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4057			else if (rld[j].when_needed
4058				 == RELOAD_FOR_OUTADDR_ADDRESS)
4059			  rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4060		      }
4061		  }
4062	      }
4063	  }
4064	else if (goal_alternative_matched[i] == -1)
4065	  {
4066	    operand_reloadnum[i]
4067	      = push_reload ((modified[i] != RELOAD_WRITE
4068			      ? recog_data.operand[i] : 0),
4069			     (modified[i] != RELOAD_READ
4070			      ? recog_data.operand[i] : 0),
4071			     (modified[i] != RELOAD_WRITE
4072			      ? recog_data.operand_loc[i] : 0),
4073			     (modified[i] != RELOAD_READ
4074			      ? recog_data.operand_loc[i] : 0),
4075			     (enum reg_class) goal_alternative[i],
4076			     (modified[i] == RELOAD_WRITE
4077			      ? VOIDmode : operand_mode[i]),
4078			     (modified[i] == RELOAD_READ
4079			      ? VOIDmode : operand_mode[i]),
4080			     (insn_code_number < 0 ? 0
4081			      : insn_data[insn_code_number].operand[i].strict_low),
4082			     0, i, operand_type[i]);
4083	  }
4084	/* In a matching pair of operands, one must be input only
4085	   and the other must be output only.
4086	   Pass the input operand as IN and the other as OUT.  */
4087	else if (modified[i] == RELOAD_READ
4088		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4089	  {
4090	    operand_reloadnum[i]
4091	      = push_reload (recog_data.operand[i],
4092			     recog_data.operand[goal_alternative_matched[i]],
4093			     recog_data.operand_loc[i],
4094			     recog_data.operand_loc[goal_alternative_matched[i]],
4095			     (enum reg_class) goal_alternative[i],
4096			     operand_mode[i],
4097			     operand_mode[goal_alternative_matched[i]],
4098			     0, 0, i, RELOAD_OTHER);
4099	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4100	  }
4101	else if (modified[i] == RELOAD_WRITE
4102		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4103	  {
4104	    operand_reloadnum[goal_alternative_matched[i]]
4105	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
4106			     recog_data.operand[i],
4107			     recog_data.operand_loc[goal_alternative_matched[i]],
4108			     recog_data.operand_loc[i],
4109			     (enum reg_class) goal_alternative[i],
4110			     operand_mode[goal_alternative_matched[i]],
4111			     operand_mode[i],
4112			     0, 0, i, RELOAD_OTHER);
4113	    operand_reloadnum[i] = output_reloadnum;
4114	  }
4115	else
4116	  {
4117	    gcc_assert (insn_code_number < 0);
4118	    error_for_asm (insn, "inconsistent operand constraints "
4119			   "in an %<asm%>");
4120	    /* Avoid further trouble with this insn.  */
4121	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4122	    n_reloads = 0;
4123	    return 0;
4124	  }
4125      }
4126    else if (goal_alternative_matched[i] < 0
4127	     && goal_alternative_matches[i] < 0
4128	     && address_operand_reloaded[i] != 1
4129	     && optimize)
4130      {
4131	/* For each non-matching operand that's a MEM or a pseudo-register
4132	   that didn't get a hard register, make an optional reload.
4133	   This may get done even if the insn needs no reloads otherwise.  */
4134
4135	rtx operand = recog_data.operand[i];
4136
4137	while (GET_CODE (operand) == SUBREG)
4138	  operand = SUBREG_REG (operand);
4139	if ((MEM_P (operand)
4140	     || (REG_P (operand)
4141		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4142	    /* If this is only for an output, the optional reload would not
4143	       actually cause us to use a register now, just note that
4144	       something is stored here.  */
4145	    && (goal_alternative[i] != NO_REGS
4146		|| modified[i] == RELOAD_WRITE)
4147	    && ! no_input_reloads
4148	    /* An optional output reload might allow to delete INSN later.
4149	       We mustn't make in-out reloads on insns that are not permitted
4150	       output reloads.
4151	       If this is an asm, we can't delete it; we must not even call
4152	       push_reload for an optional output reload in this case,
4153	       because we can't be sure that the constraint allows a register,
4154	       and push_reload verifies the constraints for asms.  */
4155	    && (modified[i] == RELOAD_READ
4156		|| (! no_output_reloads && ! this_insn_is_asm)))
4157	  operand_reloadnum[i]
4158	    = push_reload ((modified[i] != RELOAD_WRITE
4159			    ? recog_data.operand[i] : 0),
4160			   (modified[i] != RELOAD_READ
4161			    ? recog_data.operand[i] : 0),
4162			   (modified[i] != RELOAD_WRITE
4163			    ? recog_data.operand_loc[i] : 0),
4164			   (modified[i] != RELOAD_READ
4165			    ? recog_data.operand_loc[i] : 0),
4166			   (enum reg_class) goal_alternative[i],
4167			   (modified[i] == RELOAD_WRITE
4168			    ? VOIDmode : operand_mode[i]),
4169			   (modified[i] == RELOAD_READ
4170			    ? VOIDmode : operand_mode[i]),
4171			   (insn_code_number < 0 ? 0
4172			    : insn_data[insn_code_number].operand[i].strict_low),
4173			   1, i, operand_type[i]);
4174	/* If a memory reference remains (either as a MEM or a pseudo that
4175	   did not get a hard register), yet we can't make an optional
4176	   reload, check if this is actually a pseudo register reference;
4177	   we then need to emit a USE and/or a CLOBBER so that reload
4178	   inheritance will do the right thing.  */
4179	else if (replace
4180		 && (MEM_P (operand)
4181		     || (REG_P (operand)
4182			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4183			 && reg_renumber [REGNO (operand)] < 0)))
4184	  {
4185	    operand = *recog_data.operand_loc[i];
4186
4187	    while (GET_CODE (operand) == SUBREG)
4188	      operand = SUBREG_REG (operand);
4189	    if (REG_P (operand))
4190	      {
4191		if (modified[i] != RELOAD_WRITE)
4192		  /* We mark the USE with QImode so that we recognize
4193		     it as one that can be safely deleted at the end
4194		     of reload.  */
4195		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4196					      insn), QImode);
4197		if (modified[i] != RELOAD_READ)
4198		  emit_insn_after (gen_clobber (operand), insn);
4199	      }
4200	  }
4201      }
4202    else if (goal_alternative_matches[i] >= 0
4203	     && goal_alternative_win[goal_alternative_matches[i]]
4204	     && modified[i] == RELOAD_READ
4205	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4206	     && ! no_input_reloads && ! no_output_reloads
4207	     && optimize)
4208      {
4209	/* Similarly, make an optional reload for a pair of matching
4210	   objects that are in MEM or a pseudo that didn't get a hard reg.  */
4211
4212	rtx operand = recog_data.operand[i];
4213
4214	while (GET_CODE (operand) == SUBREG)
4215	  operand = SUBREG_REG (operand);
4216	if ((MEM_P (operand)
4217	     || (REG_P (operand)
4218		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4219	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4220	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4221	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
4222			   recog_data.operand[i],
4223			   recog_data.operand_loc[goal_alternative_matches[i]],
4224			   recog_data.operand_loc[i],
4225			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4226			   operand_mode[goal_alternative_matches[i]],
4227			   operand_mode[i],
4228			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4229      }
4230
4231  /* Perform whatever substitutions on the operands we are supposed
4232     to make due to commutativity or replacement of registers
4233     with equivalent constants or memory slots.  */
4234
4235  for (i = 0; i < noperands; i++)
4236    {
4237      /* We only do this on the last pass through reload, because it is
4238	 possible for some data (like reg_equiv_address) to be changed during
4239	 later passes.  Moreover, we lose the opportunity to get a useful
4240	 reload_{in,out}_reg when we do these replacements.  */
4241
4242      if (replace)
4243	{
4244	  rtx substitution = substed_operand[i];
4245
4246	  *recog_data.operand_loc[i] = substitution;
4247
4248	  /* If we're replacing an operand with a LABEL_REF, we need to
4249	     make sure that there's a REG_LABEL_OPERAND note attached to
4250	     this instruction.  */
4251	  if (GET_CODE (substitution) == LABEL_REF
4252	      && !find_reg_note (insn, REG_LABEL_OPERAND,
4253				 LABEL_REF_LABEL (substitution))
4254	      /* For a JUMP_P, if it was a branch target it must have
4255		 already been recorded as such.  */
4256	      && (!JUMP_P (insn)
4257		  || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4258					      insn)))
4259	    {
4260	      add_reg_note (insn, REG_LABEL_OPERAND,
4261			    LABEL_REF_LABEL (substitution));
4262	      if (LABEL_P (LABEL_REF_LABEL (substitution)))
4263		++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4264	    }
4265
4266	}
4267      else
4268	retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4269    }
4270
4271  /* If this insn pattern contains any MATCH_DUP's, make sure that
4272     they will be substituted if the operands they match are substituted.
4273     Also do now any substitutions we already did on the operands.
4274
4275     Don't do this if we aren't making replacements because we might be
4276     propagating things allocated by frame pointer elimination into places
4277     it doesn't expect.  */
4278
4279  if (insn_code_number >= 0 && replace)
4280    for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4281      {
4282	int opno = recog_data.dup_num[i];
4283	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4284	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4285      }
4286
4287#if 0
4288  /* This loses because reloading of prior insns can invalidate the equivalence
4289     (or at least find_equiv_reg isn't smart enough to find it any more),
4290     causing this insn to need more reload regs than it needed before.
4291     It may be too late to make the reload regs available.
4292     Now this optimization is done safely in choose_reload_regs.  */
4293
4294  /* For each reload of a reg into some other class of reg,
4295     search for an existing equivalent reg (same value now) in the right class.
4296     We can use it as long as we don't need to change its contents.  */
4297  for (i = 0; i < n_reloads; i++)
4298    if (rld[i].reg_rtx == 0
4299	&& rld[i].in != 0
4300	&& REG_P (rld[i].in)
4301	&& rld[i].out == 0)
4302      {
4303	rld[i].reg_rtx
4304	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4305			    static_reload_reg_p, 0, rld[i].inmode);
4306	/* Prevent generation of insn to load the value
4307	   because the one we found already has the value.  */
4308	if (rld[i].reg_rtx)
4309	  rld[i].in = rld[i].reg_rtx;
4310      }
4311#endif
4312
4313  /* If we detected error and replaced asm instruction by USE, forget about the
4314     reloads.  */
4315  if (GET_CODE (PATTERN (insn)) == USE
4316      && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4317    n_reloads = 0;
4318
4319  /* Perhaps an output reload can be combined with another
4320     to reduce needs by one.  */
4321  if (!goal_earlyclobber)
4322    combine_reloads ();
4323
4324  /* If we have a pair of reloads for parts of an address, they are reloading
4325     the same object, the operands themselves were not reloaded, and they
4326     are for two operands that are supposed to match, merge the reloads and
4327     change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
4328
4329  for (i = 0; i < n_reloads; i++)
4330    {
4331      int k;
4332
4333      for (j = i + 1; j < n_reloads; j++)
4334	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4335	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4336	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4337	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4338	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4339		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4340		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4341		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4342	    && rtx_equal_p (rld[i].in, rld[j].in)
4343	    && (operand_reloadnum[rld[i].opnum] < 0
4344		|| rld[operand_reloadnum[rld[i].opnum]].optional)
4345	    && (operand_reloadnum[rld[j].opnum] < 0
4346		|| rld[operand_reloadnum[rld[j].opnum]].optional)
4347	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4348		|| (goal_alternative_matches[rld[j].opnum]
4349		    == rld[i].opnum)))
4350	  {
4351	    for (k = 0; k < n_replacements; k++)
4352	      if (replacements[k].what == j)
4353		replacements[k].what = i;
4354
4355	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4356		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4357	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4358	    else
4359	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4360	    rld[j].in = 0;
4361	  }
4362    }
4363
4364  /* Scan all the reloads and update their type.
4365     If a reload is for the address of an operand and we didn't reload
4366     that operand, change the type.  Similarly, change the operand number
4367     of a reload when two operands match.  If a reload is optional, treat it
4368     as though the operand isn't reloaded.
4369
4370     ??? This latter case is somewhat odd because if we do the optional
4371     reload, it means the object is hanging around.  Thus we need only
4372     do the address reload if the optional reload was NOT done.
4373
4374     Change secondary reloads to be the address type of their operand, not
4375     the normal type.
4376
4377     If an operand's reload is now RELOAD_OTHER, change any
4378     RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4379     RELOAD_FOR_OTHER_ADDRESS.  */
4380
4381  for (i = 0; i < n_reloads; i++)
4382    {
4383      if (rld[i].secondary_p
4384	  && rld[i].when_needed == operand_type[rld[i].opnum])
4385	rld[i].when_needed = address_type[rld[i].opnum];
4386
4387      if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4388	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4389	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4390	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4391	  && (operand_reloadnum[rld[i].opnum] < 0
4392	      || rld[operand_reloadnum[rld[i].opnum]].optional))
4393	{
4394	  /* If we have a secondary reload to go along with this reload,
4395	     change its type to RELOAD_FOR_OPADDR_ADDR.  */
4396
4397	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4398	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4399	      && rld[i].secondary_in_reload != -1)
4400	    {
4401	      int secondary_in_reload = rld[i].secondary_in_reload;
4402
4403	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4404
4405	      /* If there's a tertiary reload we have to change it also.  */
4406	      if (secondary_in_reload > 0
4407		  && rld[secondary_in_reload].secondary_in_reload != -1)
4408		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4409		  = RELOAD_FOR_OPADDR_ADDR;
4410	    }
4411
4412	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4413	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4414	      && rld[i].secondary_out_reload != -1)
4415	    {
4416	      int secondary_out_reload = rld[i].secondary_out_reload;
4417
4418	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4419
4420	      /* If there's a tertiary reload we have to change it also.  */
4421	      if (secondary_out_reload
4422		  && rld[secondary_out_reload].secondary_out_reload != -1)
4423		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4424		  = RELOAD_FOR_OPADDR_ADDR;
4425	    }
4426
4427	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4428	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4429	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4430	  else
4431	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4432	}
4433
4434      if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4435	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4436	  && operand_reloadnum[rld[i].opnum] >= 0
4437	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4438	      == RELOAD_OTHER))
4439	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4440
4441      if (goal_alternative_matches[rld[i].opnum] >= 0)
4442	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4443    }
4444
4445  /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4446     If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4447     reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4448
4449     choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4450     conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4451     single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4452     However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4453     then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4454     RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4455     This is complicated by the fact that a single operand can have more
4456     than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4457     choose_reload_regs without affecting code quality, and cases that
4458     actually fail are extremely rare, so it turns out to be better to fix
4459     the problem here by not generating cases that choose_reload_regs will
4460     fail for.  */
4461  /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4462     RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4463     a single operand.
4464     We can reduce the register pressure by exploiting that a
4465     RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4466     does not conflict with any of them, if it is only used for the first of
4467     the RELOAD_FOR_X_ADDRESS reloads.  */
4468  {
4469    int first_op_addr_num = -2;
4470    int first_inpaddr_num[MAX_RECOG_OPERANDS];
4471    int first_outpaddr_num[MAX_RECOG_OPERANDS];
4472    int need_change = 0;
4473    /* We use last_op_addr_reload and the contents of the above arrays
4474       first as flags - -2 means no instance encountered, -1 means exactly
4475       one instance encountered.
4476       If more than one instance has been encountered, we store the reload
4477       number of the first reload of the kind in question; reload numbers
4478       are known to be non-negative.  */
4479    for (i = 0; i < noperands; i++)
4480      first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4481    for (i = n_reloads - 1; i >= 0; i--)
4482      {
4483	switch (rld[i].when_needed)
4484	  {
4485	  case RELOAD_FOR_OPERAND_ADDRESS:
4486	    if (++first_op_addr_num >= 0)
4487	      {
4488		first_op_addr_num = i;
4489		need_change = 1;
4490	      }
4491	    break;
4492	  case RELOAD_FOR_INPUT_ADDRESS:
4493	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
4494	      {
4495		first_inpaddr_num[rld[i].opnum] = i;
4496		need_change = 1;
4497	      }
4498	    break;
4499	  case RELOAD_FOR_OUTPUT_ADDRESS:
4500	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
4501	      {
4502		first_outpaddr_num[rld[i].opnum] = i;
4503		need_change = 1;
4504	      }
4505	    break;
4506	  default:
4507	    break;
4508	  }
4509      }
4510
4511    if (need_change)
4512      {
4513	for (i = 0; i < n_reloads; i++)
4514	  {
4515	    int first_num;
4516	    enum reload_type type;
4517
4518	    switch (rld[i].when_needed)
4519	      {
4520	      case RELOAD_FOR_OPADDR_ADDR:
4521		first_num = first_op_addr_num;
4522		type = RELOAD_FOR_OPERAND_ADDRESS;
4523		break;
4524	      case RELOAD_FOR_INPADDR_ADDRESS:
4525		first_num = first_inpaddr_num[rld[i].opnum];
4526		type = RELOAD_FOR_INPUT_ADDRESS;
4527		break;
4528	      case RELOAD_FOR_OUTADDR_ADDRESS:
4529		first_num = first_outpaddr_num[rld[i].opnum];
4530		type = RELOAD_FOR_OUTPUT_ADDRESS;
4531		break;
4532	      default:
4533		continue;
4534	      }
4535	    if (first_num < 0)
4536	      continue;
4537	    else if (i > first_num)
4538	      rld[i].when_needed = type;
4539	    else
4540	      {
4541		/* Check if the only TYPE reload that uses reload I is
4542		   reload FIRST_NUM.  */
4543		for (j = n_reloads - 1; j > first_num; j--)
4544		  {
4545		    if (rld[j].when_needed == type
4546			&& (rld[i].secondary_p
4547			    ? rld[j].secondary_in_reload == i
4548			    : reg_mentioned_p (rld[i].in, rld[j].in)))
4549		      {
4550			rld[i].when_needed = type;
4551			break;
4552		      }
4553		  }
4554	      }
4555	  }
4556      }
4557  }
4558
4559  /* See if we have any reloads that are now allowed to be merged
4560     because we've changed when the reload is needed to
4561     RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4562     check for the most common cases.  */
4563
4564  for (i = 0; i < n_reloads; i++)
4565    if (rld[i].in != 0 && rld[i].out == 0
4566	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4567	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4568	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4569      for (j = 0; j < n_reloads; j++)
4570	if (i != j && rld[j].in != 0 && rld[j].out == 0
4571	    && rld[j].when_needed == rld[i].when_needed
4572	    && MATCHES (rld[i].in, rld[j].in)
4573	    && rld[i].rclass == rld[j].rclass
4574	    && !rld[i].nocombine && !rld[j].nocombine
4575	    && rld[i].reg_rtx == rld[j].reg_rtx)
4576	  {
4577	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4578	    transfer_replacements (i, j);
4579	    rld[j].in = 0;
4580	  }
4581
4582#ifdef HAVE_cc0
4583  /* If we made any reloads for addresses, see if they violate a
4584     "no input reloads" requirement for this insn.  But loads that we
4585     do after the insn (such as for output addresses) are fine.  */
4586  if (no_input_reloads)
4587    for (i = 0; i < n_reloads; i++)
4588      gcc_assert (rld[i].in == 0
4589		  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4590		  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4591#endif
4592
4593  /* Compute reload_mode and reload_nregs.  */
4594  for (i = 0; i < n_reloads; i++)
4595    {
4596      rld[i].mode
4597	= (rld[i].inmode == VOIDmode
4598	   || (GET_MODE_SIZE (rld[i].outmode)
4599	       > GET_MODE_SIZE (rld[i].inmode)))
4600	  ? rld[i].outmode : rld[i].inmode;
4601
4602      rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4603    }
4604
4605  /* Special case a simple move with an input reload and a
4606     destination of a hard reg, if the hard reg is ok, use it.  */
4607  for (i = 0; i < n_reloads; i++)
4608    if (rld[i].when_needed == RELOAD_FOR_INPUT
4609	&& GET_CODE (PATTERN (insn)) == SET
4610	&& REG_P (SET_DEST (PATTERN (insn)))
4611	&& (SET_SRC (PATTERN (insn)) == rld[i].in
4612	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4613	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4614      {
4615	rtx dest = SET_DEST (PATTERN (insn));
4616	unsigned int regno = REGNO (dest);
4617
4618	if (regno < FIRST_PSEUDO_REGISTER
4619	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4620	    && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4621	  {
4622	    int nr = hard_regno_nregs[regno][rld[i].mode];
4623	    int ok = 1, nri;
4624
4625	    for (nri = 1; nri < nr; nri ++)
4626	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4627		{
4628		  ok = 0;
4629		  break;
4630		}
4631
4632	    if (ok)
4633	      rld[i].reg_rtx = dest;
4634	  }
4635      }
4636
4637  return retval;
4638}
4639
4640/* Return true if alternative number ALTNUM in constraint-string
4641   CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4642   MEM gives the reference if it didn't need any reloads, otherwise it
4643   is null.  */
4644
4645static bool
4646alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4647				   const char *constraint, int altnum)
4648{
4649  int c;
4650
4651  /* Skip alternatives before the one requested.  */
4652  while (altnum > 0)
4653    {
4654      while (*constraint++ != ',')
4655	;
4656      altnum--;
4657    }
4658  /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4659     If one of them is present, this alternative accepts the result of
4660     passing a constant-pool reference through find_reloads_toplev.
4661
4662     The same is true of extra memory constraints if the address
4663     was reloaded into a register.  However, the target may elect
4664     to disallow the original constant address, forcing it to be
4665     reloaded into a register instead.  */
4666  for (; (c = *constraint) && c != ',' && c != '#';
4667       constraint += CONSTRAINT_LEN (c, constraint))
4668    {
4669      enum constraint_num cn = lookup_constraint (constraint);
4670      if (insn_extra_memory_constraint (cn)
4671	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
4672	return true;
4673    }
4674  return false;
4675}
4676
4677/* Scan X for memory references and scan the addresses for reloading.
4678   Also checks for references to "constant" regs that we want to eliminate
4679   and replaces them with the values they stand for.
4680   We may alter X destructively if it contains a reference to such.
4681   If X is just a constant reg, we return the equivalent value
4682   instead of X.
4683
4684   IND_LEVELS says how many levels of indirect addressing this machine
4685   supports.
4686
4687   OPNUM and TYPE identify the purpose of the reload.
4688
4689   IS_SET_DEST is true if X is the destination of a SET, which is not
4690   appropriate to be replaced by a constant.
4691
4692   INSN, if nonzero, is the insn in which we do the reload.  It is used
4693   to determine if we may generate output reloads, and where to put USEs
4694   for pseudos that we have to replace with stack slots.
4695
4696   ADDRESS_RELOADED.  If nonzero, is a pointer to where we put the
4697   result of find_reloads_address.  */
4698
4699static rtx
4700find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4701		     int ind_levels, int is_set_dest, rtx_insn *insn,
4702		     int *address_reloaded)
4703{
4704  RTX_CODE code = GET_CODE (x);
4705
4706  const char *fmt = GET_RTX_FORMAT (code);
4707  int i;
4708  int copied;
4709
4710  if (code == REG)
4711    {
4712      /* This code is duplicated for speed in find_reloads.  */
4713      int regno = REGNO (x);
4714      if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4715	x = reg_equiv_constant (regno);
4716#if 0
4717      /*  This creates (subreg (mem...)) which would cause an unnecessary
4718	  reload of the mem.  */
4719      else if (reg_equiv_mem (regno) != 0)
4720	x = reg_equiv_mem (regno);
4721#endif
4722      else if (reg_equiv_memory_loc (regno)
4723	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4724	{
4725	  rtx mem = make_memloc (x, regno);
4726	  if (reg_equiv_address (regno)
4727	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4728	    {
4729	      /* If this is not a toplevel operand, find_reloads doesn't see
4730		 this substitution.  We have to emit a USE of the pseudo so
4731		 that delete_output_reload can see it.  */
4732	      if (replace_reloads && recog_data.operand[opnum] != x)
4733		/* We mark the USE with QImode so that we recognize it
4734		   as one that can be safely deleted at the end of
4735		   reload.  */
4736		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4737			  QImode);
4738	      x = mem;
4739	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4740					opnum, type, ind_levels, insn);
4741	      if (!rtx_equal_p (x, mem))
4742		push_reg_equiv_alt_mem (regno, x);
4743	      if (address_reloaded)
4744		*address_reloaded = i;
4745	    }
4746	}
4747      return x;
4748    }
4749  if (code == MEM)
4750    {
4751      rtx tem = x;
4752
4753      i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4754				opnum, type, ind_levels, insn);
4755      if (address_reloaded)
4756	*address_reloaded = i;
4757
4758      return tem;
4759    }
4760
4761  if (code == SUBREG && REG_P (SUBREG_REG (x)))
4762    {
4763      /* Check for SUBREG containing a REG that's equivalent to a
4764	 constant.  If the constant has a known value, truncate it
4765	 right now.  Similarly if we are extracting a single-word of a
4766	 multi-word constant.  If the constant is symbolic, allow it
4767	 to be substituted normally.  push_reload will strip the
4768	 subreg later.  The constant must not be VOIDmode, because we
4769	 will lose the mode of the register (this should never happen
4770	 because one of the cases above should handle it).  */
4771
4772      int regno = REGNO (SUBREG_REG (x));
4773      rtx tem;
4774
4775      if (regno >= FIRST_PSEUDO_REGISTER
4776	  && reg_renumber[regno] < 0
4777	  && reg_equiv_constant (regno) != 0)
4778	{
4779	  tem =
4780	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4781				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4782	  gcc_assert (tem);
4783	  if (CONSTANT_P (tem)
4784	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4785	    {
4786	      tem = force_const_mem (GET_MODE (x), tem);
4787	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4788					&XEXP (tem, 0), opnum, type,
4789					ind_levels, insn);
4790	      if (address_reloaded)
4791		*address_reloaded = i;
4792	    }
4793	  return tem;
4794	}
4795
4796      /* If the subreg contains a reg that will be converted to a mem,
4797	 attempt to convert the whole subreg to a (narrower or wider)
4798	 memory reference instead.  If this succeeds, we're done --
4799	 otherwise fall through to check whether the inner reg still
4800	 needs address reloads anyway.  */
4801
4802      if (regno >= FIRST_PSEUDO_REGISTER
4803	  && reg_equiv_memory_loc (regno) != 0)
4804	{
4805	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4806					     insn, address_reloaded);
4807	  if (tem)
4808	    return tem;
4809	}
4810    }
4811
4812  for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4813    {
4814      if (fmt[i] == 'e')
4815	{
4816	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4817					      ind_levels, is_set_dest, insn,
4818					      address_reloaded);
4819	  /* If we have replaced a reg with it's equivalent memory loc -
4820	     that can still be handled here e.g. if it's in a paradoxical
4821	     subreg - we must make the change in a copy, rather than using
4822	     a destructive change.  This way, find_reloads can still elect
4823	     not to do the change.  */
4824	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4825	    {
4826	      x = shallow_copy_rtx (x);
4827	      copied = 1;
4828	    }
4829	  XEXP (x, i) = new_part;
4830	}
4831    }
4832  return x;
4833}
4834
4835/* Return a mem ref for the memory equivalent of reg REGNO.
4836   This mem ref is not shared with anything.  */
4837
4838static rtx
4839make_memloc (rtx ad, int regno)
4840{
4841  /* We must rerun eliminate_regs, in case the elimination
4842     offsets have changed.  */
4843  rtx tem
4844    = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4845	    0);
4846
4847  /* If TEM might contain a pseudo, we must copy it to avoid
4848     modifying it when we do the substitution for the reload.  */
4849  if (rtx_varies_p (tem, 0))
4850    tem = copy_rtx (tem);
4851
4852  tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4853  tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4854
4855  /* Copy the result if it's still the same as the equivalence, to avoid
4856     modifying it when we do the substitution for the reload.  */
4857  if (tem == reg_equiv_memory_loc (regno))
4858    tem = copy_rtx (tem);
4859  return tem;
4860}
4861
4862/* Returns true if AD could be turned into a valid memory reference
4863   to mode MODE in address space AS by reloading the part pointed to
4864   by PART into a register.  */
4865
4866static int
4867maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4868				   addr_space_t as, rtx *part)
4869{
4870  int retv;
4871  rtx tem = *part;
4872  rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4873
4874  *part = reg;
4875  retv = memory_address_addr_space_p (mode, ad, as);
4876  *part = tem;
4877
4878  return retv;
4879}
4880
4881/* Record all reloads needed for handling memory address AD
4882   which appears in *LOC in a memory reference to mode MODE
4883   which itself is found in location  *MEMREFLOC.
4884   Note that we take shortcuts assuming that no multi-reg machine mode
4885   occurs as part of an address.
4886
4887   OPNUM and TYPE specify the purpose of this reload.
4888
4889   IND_LEVELS says how many levels of indirect addressing this machine
4890   supports.
4891
4892   INSN, if nonzero, is the insn in which we do the reload.  It is used
4893   to determine if we may generate output reloads, and where to put USEs
4894   for pseudos that we have to replace with stack slots.
4895
4896   Value is one if this address is reloaded or replaced as a whole; it is
4897   zero if the top level of this address was not reloaded or replaced, and
4898   it is -1 if it may or may not have been reloaded or replaced.
4899
4900   Note that there is no verification that the address will be valid after
4901   this routine does its work.  Instead, we rely on the fact that the address
4902   was valid when reload started.  So we need only undo things that reload
4903   could have broken.  These are wrong register types, pseudos not allocated
4904   to a hard register, and frame pointer elimination.  */
4905
4906static int
4907find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4908		      rtx *loc, int opnum, enum reload_type type,
4909		      int ind_levels, rtx_insn *insn)
4910{
4911  addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4912			     : ADDR_SPACE_GENERIC;
4913  int regno;
4914  int removed_and = 0;
4915  int op_index;
4916  rtx tem;
4917
4918  /* If the address is a register, see if it is a legitimate address and
4919     reload if not.  We first handle the cases where we need not reload
4920     or where we must reload in a non-standard way.  */
4921
4922  if (REG_P (ad))
4923    {
4924      regno = REGNO (ad);
4925
4926      if (reg_equiv_constant (regno) != 0)
4927	{
4928	  find_reloads_address_part (reg_equiv_constant (regno), loc,
4929				     base_reg_class (mode, as, MEM, SCRATCH),
4930				     GET_MODE (ad), opnum, type, ind_levels);
4931	  return 1;
4932	}
4933
4934      tem = reg_equiv_memory_loc (regno);
4935      if (tem != 0)
4936	{
4937	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4938	    {
4939	      tem = make_memloc (ad, regno);
4940	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4941							XEXP (tem, 0),
4942							MEM_ADDR_SPACE (tem)))
4943		{
4944		  rtx orig = tem;
4945
4946		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4947					&XEXP (tem, 0), opnum,
4948					ADDR_TYPE (type), ind_levels, insn);
4949		  if (!rtx_equal_p (tem, orig))
4950		    push_reg_equiv_alt_mem (regno, tem);
4951		}
4952	      /* We can avoid a reload if the register's equivalent memory
4953		 expression is valid as an indirect memory address.
4954		 But not all addresses are valid in a mem used as an indirect
4955		 address: only reg or reg+constant.  */
4956
4957	      if (ind_levels > 0
4958		  && strict_memory_address_addr_space_p (mode, tem, as)
4959		  && (REG_P (XEXP (tem, 0))
4960		      || (GET_CODE (XEXP (tem, 0)) == PLUS
4961			  && REG_P (XEXP (XEXP (tem, 0), 0))
4962			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4963		{
4964		  /* TEM is not the same as what we'll be replacing the
4965		     pseudo with after reload, put a USE in front of INSN
4966		     in the final reload pass.  */
4967		  if (replace_reloads
4968		      && num_not_at_initial_offset
4969		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4970		    {
4971		      *loc = tem;
4972		      /* We mark the USE with QImode so that we
4973			 recognize it as one that can be safely
4974			 deleted at the end of reload.  */
4975		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4976						  insn), QImode);
4977
4978		      /* This doesn't really count as replacing the address
4979			 as a whole, since it is still a memory access.  */
4980		    }
4981		  return 0;
4982		}
4983	      ad = tem;
4984	    }
4985	}
4986
4987      /* The only remaining case where we can avoid a reload is if this is a
4988	 hard register that is valid as a base register and which is not the
4989	 subject of a CLOBBER in this insn.  */
4990
4991      else if (regno < FIRST_PSEUDO_REGISTER
4992	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4993	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
4994	return 0;
4995
4996      /* If we do not have one of the cases above, we must do the reload.  */
4997      push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4998		   base_reg_class (mode, as, MEM, SCRATCH),
4999		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
5000      return 1;
5001    }
5002
5003  if (strict_memory_address_addr_space_p (mode, ad, as))
5004    {
5005      /* The address appears valid, so reloads are not needed.
5006	 But the address may contain an eliminable register.
5007	 This can happen because a machine with indirect addressing
5008	 may consider a pseudo register by itself a valid address even when
5009	 it has failed to get a hard reg.
5010	 So do a tree-walk to find and eliminate all such regs.  */
5011
5012      /* But first quickly dispose of a common case.  */
5013      if (GET_CODE (ad) == PLUS
5014	  && CONST_INT_P (XEXP (ad, 1))
5015	  && REG_P (XEXP (ad, 0))
5016	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5017	return 0;
5018
5019      subst_reg_equivs_changed = 0;
5020      *loc = subst_reg_equivs (ad, insn);
5021
5022      if (! subst_reg_equivs_changed)
5023	return 0;
5024
5025      /* Check result for validity after substitution.  */
5026      if (strict_memory_address_addr_space_p (mode, ad, as))
5027	return 0;
5028    }
5029
5030#ifdef LEGITIMIZE_RELOAD_ADDRESS
5031  do
5032    {
5033      if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5034	{
5035	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5036				     ind_levels, win);
5037	}
5038      break;
5039    win:
5040      *memrefloc = copy_rtx (*memrefloc);
5041      XEXP (*memrefloc, 0) = ad;
5042      move_replacements (&ad, &XEXP (*memrefloc, 0));
5043      return -1;
5044    }
5045  while (0);
5046#endif
5047
5048  /* The address is not valid.  We have to figure out why.  First see if
5049     we have an outer AND and remove it if so.  Then analyze what's inside.  */
5050
5051  if (GET_CODE (ad) == AND)
5052    {
5053      removed_and = 1;
5054      loc = &XEXP (ad, 0);
5055      ad = *loc;
5056    }
5057
5058  /* One possibility for why the address is invalid is that it is itself
5059     a MEM.  This can happen when the frame pointer is being eliminated, a
5060     pseudo is not allocated to a hard register, and the offset between the
5061     frame and stack pointers is not its initial value.  In that case the
5062     pseudo will have been replaced by a MEM referring to the
5063     stack pointer.  */
5064  if (MEM_P (ad))
5065    {
5066      /* First ensure that the address in this MEM is valid.  Then, unless
5067	 indirect addresses are valid, reload the MEM into a register.  */
5068      tem = ad;
5069      find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5070			    opnum, ADDR_TYPE (type),
5071			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
5072
5073      /* If tem was changed, then we must create a new memory reference to
5074	 hold it and store it back into memrefloc.  */
5075      if (tem != ad && memrefloc)
5076	{
5077	  *memrefloc = copy_rtx (*memrefloc);
5078	  copy_replacements (tem, XEXP (*memrefloc, 0));
5079	  loc = &XEXP (*memrefloc, 0);
5080	  if (removed_and)
5081	    loc = &XEXP (*loc, 0);
5082	}
5083
5084      /* Check similar cases as for indirect addresses as above except
5085	 that we can allow pseudos and a MEM since they should have been
5086	 taken care of above.  */
5087
5088      if (ind_levels == 0
5089	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5090	  || MEM_P (XEXP (tem, 0))
5091	  || ! (REG_P (XEXP (tem, 0))
5092		|| (GET_CODE (XEXP (tem, 0)) == PLUS
5093		    && REG_P (XEXP (XEXP (tem, 0), 0))
5094		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5095	{
5096	  /* Must use TEM here, not AD, since it is the one that will
5097	     have any subexpressions reloaded, if needed.  */
5098	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5099		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5100		       VOIDmode, 0,
5101		       0, opnum, type);
5102	  return ! removed_and;
5103	}
5104      else
5105	return 0;
5106    }
5107
5108  /* If we have address of a stack slot but it's not valid because the
5109     displacement is too large, compute the sum in a register.
5110     Handle all base registers here, not just fp/ap/sp, because on some
5111     targets (namely SH) we can also get too large displacements from
5112     big-endian corrections.  */
5113  else if (GET_CODE (ad) == PLUS
5114	   && REG_P (XEXP (ad, 0))
5115	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5116	   && CONST_INT_P (XEXP (ad, 1))
5117	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5118				    CONST_INT)
5119	       /* Similarly, if we were to reload the base register and the
5120		  mem+offset address is still invalid, then we want to reload
5121		  the whole address, not just the base register.  */
5122	       || ! maybe_memory_address_addr_space_p
5123		     (mode, ad, as, &(XEXP (ad, 0)))))
5124
5125    {
5126      /* Unshare the MEM rtx so we can safely alter it.  */
5127      if (memrefloc)
5128	{
5129	  *memrefloc = copy_rtx (*memrefloc);
5130	  loc = &XEXP (*memrefloc, 0);
5131	  if (removed_and)
5132	    loc = &XEXP (*loc, 0);
5133	}
5134
5135      if (double_reg_address_ok
5136	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5137				  PLUS, CONST_INT))
5138	{
5139	  /* Unshare the sum as well.  */
5140	  *loc = ad = copy_rtx (ad);
5141
5142	  /* Reload the displacement into an index reg.
5143	     We assume the frame pointer or arg pointer is a base reg.  */
5144	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5145				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
5146				     type, ind_levels);
5147	  return 0;
5148	}
5149      else
5150	{
5151	  /* If the sum of two regs is not necessarily valid,
5152	     reload the sum into a base reg.
5153	     That will at least work.  */
5154	  find_reloads_address_part (ad, loc,
5155				     base_reg_class (mode, as, MEM, SCRATCH),
5156				     GET_MODE (ad), opnum, type, ind_levels);
5157	}
5158      return ! removed_and;
5159    }
5160
5161  /* If we have an indexed stack slot, there are three possible reasons why
5162     it might be invalid: The index might need to be reloaded, the address
5163     might have been made by frame pointer elimination and hence have a
5164     constant out of range, or both reasons might apply.
5165
5166     We can easily check for an index needing reload, but even if that is the
5167     case, we might also have an invalid constant.  To avoid making the
5168     conservative assumption and requiring two reloads, we see if this address
5169     is valid when not interpreted strictly.  If it is, the only problem is
5170     that the index needs a reload and find_reloads_address_1 will take care
5171     of it.
5172
5173     Handle all base registers here, not just fp/ap/sp, because on some
5174     targets (namely SPARC) we can also get invalid addresses from preventive
5175     subreg big-endian corrections made by find_reloads_toplev.  We
5176     can also get expressions involving LO_SUM (rather than PLUS) from
5177     find_reloads_subreg_address.
5178
5179     If we decide to do something, it must be that `double_reg_address_ok'
5180     is true.  We generate a reload of the base register + constant and
5181     rework the sum so that the reload register will be added to the index.
5182     This is safe because we know the address isn't shared.
5183
5184     We check for the base register as both the first and second operand of
5185     the innermost PLUS and/or LO_SUM.  */
5186
5187  for (op_index = 0; op_index < 2; ++op_index)
5188    {
5189      rtx operand, addend;
5190      enum rtx_code inner_code;
5191
5192      if (GET_CODE (ad) != PLUS)
5193	  continue;
5194
5195      inner_code = GET_CODE (XEXP (ad, 0));
5196      if (!(GET_CODE (ad) == PLUS
5197	    && CONST_INT_P (XEXP (ad, 1))
5198	    && (inner_code == PLUS || inner_code == LO_SUM)))
5199	continue;
5200
5201      operand = XEXP (XEXP (ad, 0), op_index);
5202      if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5203	continue;
5204
5205      addend = XEXP (XEXP (ad, 0), 1 - op_index);
5206
5207      if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5208				GET_CODE (addend))
5209	   || operand == frame_pointer_rtx
5210#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5211	   || operand == hard_frame_pointer_rtx
5212#endif
5213#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5214	   || operand == arg_pointer_rtx
5215#endif
5216	   || operand == stack_pointer_rtx)
5217	  && ! maybe_memory_address_addr_space_p
5218		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5219	{
5220	  rtx offset_reg;
5221	  enum reg_class cls;
5222
5223	  offset_reg = plus_constant (GET_MODE (ad), operand,
5224				      INTVAL (XEXP (ad, 1)));
5225
5226	  /* Form the adjusted address.  */
5227	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
5228	    ad = gen_rtx_PLUS (GET_MODE (ad),
5229			       op_index == 0 ? offset_reg : addend,
5230			       op_index == 0 ? addend : offset_reg);
5231	  else
5232	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
5233				 op_index == 0 ? offset_reg : addend,
5234				 op_index == 0 ? addend : offset_reg);
5235	  *loc = ad;
5236
5237	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5238	  find_reloads_address_part (XEXP (ad, op_index),
5239				     &XEXP (ad, op_index), cls,
5240				     GET_MODE (ad), opnum, type, ind_levels);
5241	  find_reloads_address_1 (mode, as,
5242				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5243				  GET_CODE (XEXP (ad, op_index)),
5244				  &XEXP (ad, 1 - op_index), opnum,
5245				  type, 0, insn);
5246
5247	  return 0;
5248	}
5249    }
5250
5251  /* See if address becomes valid when an eliminable register
5252     in a sum is replaced.  */
5253
5254  tem = ad;
5255  if (GET_CODE (ad) == PLUS)
5256    tem = subst_indexed_address (ad);
5257  if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5258    {
5259      /* Ok, we win that way.  Replace any additional eliminable
5260	 registers.  */
5261
5262      subst_reg_equivs_changed = 0;
5263      tem = subst_reg_equivs (tem, insn);
5264
5265      /* Make sure that didn't make the address invalid again.  */
5266
5267      if (! subst_reg_equivs_changed
5268	  || strict_memory_address_addr_space_p (mode, tem, as))
5269	{
5270	  *loc = tem;
5271	  return 0;
5272	}
5273    }
5274
5275  /* If constants aren't valid addresses, reload the constant address
5276     into a register.  */
5277  if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5278    {
5279      machine_mode address_mode = GET_MODE (ad);
5280      if (address_mode == VOIDmode)
5281	address_mode = targetm.addr_space.address_mode (as);
5282
5283      /* If AD is an address in the constant pool, the MEM rtx may be shared.
5284	 Unshare it so we can safely alter it.  */
5285      if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5286	  && CONSTANT_POOL_ADDRESS_P (ad))
5287	{
5288	  *memrefloc = copy_rtx (*memrefloc);
5289	  loc = &XEXP (*memrefloc, 0);
5290	  if (removed_and)
5291	    loc = &XEXP (*loc, 0);
5292	}
5293
5294      find_reloads_address_part (ad, loc,
5295				 base_reg_class (mode, as, MEM, SCRATCH),
5296				 address_mode, opnum, type, ind_levels);
5297      return ! removed_and;
5298    }
5299
5300  return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5301				 opnum, type, ind_levels, insn);
5302}
5303
5304/* Find all pseudo regs appearing in AD
5305   that are eliminable in favor of equivalent values
5306   and do not have hard regs; replace them by their equivalents.
5307   INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
5308   front of it for pseudos that we have to replace with stack slots.  */
5309
5310static rtx
5311subst_reg_equivs (rtx ad, rtx_insn *insn)
5312{
5313  RTX_CODE code = GET_CODE (ad);
5314  int i;
5315  const char *fmt;
5316
5317  switch (code)
5318    {
5319    case HIGH:
5320    case CONST:
5321    CASE_CONST_ANY:
5322    case SYMBOL_REF:
5323    case LABEL_REF:
5324    case PC:
5325    case CC0:
5326      return ad;
5327
5328    case REG:
5329      {
5330	int regno = REGNO (ad);
5331
5332	if (reg_equiv_constant (regno) != 0)
5333	  {
5334	    subst_reg_equivs_changed = 1;
5335	    return reg_equiv_constant (regno);
5336	  }
5337	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5338	  {
5339	    rtx mem = make_memloc (ad, regno);
5340	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5341	      {
5342		subst_reg_equivs_changed = 1;
5343		/* We mark the USE with QImode so that we recognize it
5344		   as one that can be safely deleted at the end of
5345		   reload.  */
5346		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5347			  QImode);
5348		return mem;
5349	      }
5350	  }
5351      }
5352      return ad;
5353
5354    case PLUS:
5355      /* Quickly dispose of a common case.  */
5356      if (XEXP (ad, 0) == frame_pointer_rtx
5357	  && CONST_INT_P (XEXP (ad, 1)))
5358	return ad;
5359      break;
5360
5361    default:
5362      break;
5363    }
5364
5365  fmt = GET_RTX_FORMAT (code);
5366  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5367    if (fmt[i] == 'e')
5368      XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5369  return ad;
5370}
5371
5372/* Compute the sum of X and Y, making canonicalizations assumed in an
5373   address, namely: sum constant integers, surround the sum of two
5374   constants with a CONST, put the constant as the second operand, and
5375   group the constant on the outermost sum.
5376
5377   This routine assumes both inputs are already in canonical form.  */
5378
5379rtx
5380form_sum (machine_mode mode, rtx x, rtx y)
5381{
5382  rtx tem;
5383
5384  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5385  gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5386
5387  if (CONST_INT_P (x))
5388    return plus_constant (mode, y, INTVAL (x));
5389  else if (CONST_INT_P (y))
5390    return plus_constant (mode, x, INTVAL (y));
5391  else if (CONSTANT_P (x))
5392    tem = x, x = y, y = tem;
5393
5394  if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5395    return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5396
5397  /* Note that if the operands of Y are specified in the opposite
5398     order in the recursive calls below, infinite recursion will occur.  */
5399  if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5400    return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5401
5402  /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
5403     constant will have been placed second.  */
5404  if (CONSTANT_P (x) && CONSTANT_P (y))
5405    {
5406      if (GET_CODE (x) == CONST)
5407	x = XEXP (x, 0);
5408      if (GET_CODE (y) == CONST)
5409	y = XEXP (y, 0);
5410
5411      return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5412    }
5413
5414  return gen_rtx_PLUS (mode, x, y);
5415}
5416
5417/* If ADDR is a sum containing a pseudo register that should be
5418   replaced with a constant (from reg_equiv_constant),
5419   return the result of doing so, and also apply the associative
5420   law so that the result is more likely to be a valid address.
5421   (But it is not guaranteed to be one.)
5422
5423   Note that at most one register is replaced, even if more are
5424   replaceable.  Also, we try to put the result into a canonical form
5425   so it is more likely to be a valid address.
5426
5427   In all other cases, return ADDR.  */
5428
5429static rtx
5430subst_indexed_address (rtx addr)
5431{
5432  rtx op0 = 0, op1 = 0, op2 = 0;
5433  rtx tem;
5434  int regno;
5435
5436  if (GET_CODE (addr) == PLUS)
5437    {
5438      /* Try to find a register to replace.  */
5439      op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5440      if (REG_P (op0)
5441	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5442	  && reg_renumber[regno] < 0
5443	  && reg_equiv_constant (regno) != 0)
5444	op0 = reg_equiv_constant (regno);
5445      else if (REG_P (op1)
5446	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5447	       && reg_renumber[regno] < 0
5448	       && reg_equiv_constant (regno) != 0)
5449	op1 = reg_equiv_constant (regno);
5450      else if (GET_CODE (op0) == PLUS
5451	       && (tem = subst_indexed_address (op0)) != op0)
5452	op0 = tem;
5453      else if (GET_CODE (op1) == PLUS
5454	       && (tem = subst_indexed_address (op1)) != op1)
5455	op1 = tem;
5456      else
5457	return addr;
5458
5459      /* Pick out up to three things to add.  */
5460      if (GET_CODE (op1) == PLUS)
5461	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5462      else if (GET_CODE (op0) == PLUS)
5463	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5464
5465      /* Compute the sum.  */
5466      if (op2 != 0)
5467	op1 = form_sum (GET_MODE (addr), op1, op2);
5468      if (op1 != 0)
5469	op0 = form_sum (GET_MODE (addr), op0, op1);
5470
5471      return op0;
5472    }
5473  return addr;
5474}
5475
5476/* Update the REG_INC notes for an insn.  It updates all REG_INC
5477   notes for the instruction which refer to REGNO the to refer
5478   to the reload number.
5479
5480   INSN is the insn for which any REG_INC notes need updating.
5481
5482   REGNO is the register number which has been reloaded.
5483
5484   RELOADNUM is the reload number.  */
5485
5486static void
5487update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5488		       int reloadnum ATTRIBUTE_UNUSED)
5489{
5490#ifdef AUTO_INC_DEC
5491  rtx link;
5492
5493  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5494    if (REG_NOTE_KIND (link) == REG_INC
5495        && (int) REGNO (XEXP (link, 0)) == regno)
5496      push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5497#endif
5498}
5499
5500/* Record the pseudo registers we must reload into hard registers in a
5501   subexpression of a would-be memory address, X referring to a value
5502   in mode MODE.  (This function is not called if the address we find
5503   is strictly valid.)
5504
5505   CONTEXT = 1 means we are considering regs as index regs,
5506   = 0 means we are considering them as base regs.
5507   OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5508   or an autoinc code.
5509   If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5510   is the code of the index part of the address.  Otherwise, pass SCRATCH
5511   for this argument.
5512   OPNUM and TYPE specify the purpose of any reloads made.
5513
5514   IND_LEVELS says how many levels of indirect addressing are
5515   supported at this point in the address.
5516
5517   INSN, if nonzero, is the insn in which we do the reload.  It is used
5518   to determine if we may generate output reloads.
5519
5520   We return nonzero if X, as a whole, is reloaded or replaced.  */
5521
5522/* Note that we take shortcuts assuming that no multi-reg machine mode
5523   occurs as part of an address.
5524   Also, this is not fully machine-customizable; it works for machines
5525   such as VAXen and 68000's and 32000's, but other possible machines
5526   could have addressing modes that this does not handle right.
5527   If you add push_reload calls here, you need to make sure gen_reload
5528   handles those cases gracefully.  */
5529
5530static int
5531find_reloads_address_1 (machine_mode mode, addr_space_t as,
5532			rtx x, int context,
5533			enum rtx_code outer_code, enum rtx_code index_code,
5534			rtx *loc, int opnum, enum reload_type type,
5535			int ind_levels, rtx_insn *insn)
5536{
5537#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
5538  ((CONTEXT) == 0							\
5539   ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
5540   : REGNO_OK_FOR_INDEX_P (REGNO))
5541
5542  enum reg_class context_reg_class;
5543  RTX_CODE code = GET_CODE (x);
5544  bool reloaded_inner_of_autoinc = false;
5545
5546  if (context == 1)
5547    context_reg_class = INDEX_REG_CLASS;
5548  else
5549    context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5550
5551  switch (code)
5552    {
5553    case PLUS:
5554      {
5555	rtx orig_op0 = XEXP (x, 0);
5556	rtx orig_op1 = XEXP (x, 1);
5557	RTX_CODE code0 = GET_CODE (orig_op0);
5558	RTX_CODE code1 = GET_CODE (orig_op1);
5559	rtx op0 = orig_op0;
5560	rtx op1 = orig_op1;
5561
5562	if (GET_CODE (op0) == SUBREG)
5563	  {
5564	    op0 = SUBREG_REG (op0);
5565	    code0 = GET_CODE (op0);
5566	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5567	      op0 = gen_rtx_REG (word_mode,
5568				 (REGNO (op0) +
5569				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5570						       GET_MODE (SUBREG_REG (orig_op0)),
5571						       SUBREG_BYTE (orig_op0),
5572						       GET_MODE (orig_op0))));
5573	  }
5574
5575	if (GET_CODE (op1) == SUBREG)
5576	  {
5577	    op1 = SUBREG_REG (op1);
5578	    code1 = GET_CODE (op1);
5579	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5580	      /* ??? Why is this given op1's mode and above for
5581		 ??? op0 SUBREGs we use word_mode?  */
5582	      op1 = gen_rtx_REG (GET_MODE (op1),
5583				 (REGNO (op1) +
5584				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5585						       GET_MODE (SUBREG_REG (orig_op1)),
5586						       SUBREG_BYTE (orig_op1),
5587						       GET_MODE (orig_op1))));
5588	  }
5589	/* Plus in the index register may be created only as a result of
5590	   register rematerialization for expression like &localvar*4.  Reload it.
5591	   It may be possible to combine the displacement on the outer level,
5592	   but it is probably not worthwhile to do so.  */
5593	if (context == 1)
5594	  {
5595	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5596				  opnum, ADDR_TYPE (type), ind_levels, insn);
5597	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5598			 context_reg_class,
5599			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5600	    return 1;
5601	  }
5602
5603	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5604	    || code0 == ZERO_EXTEND || code1 == MEM)
5605	  {
5606	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5607				    &XEXP (x, 0), opnum, type, ind_levels,
5608				    insn);
5609	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5610				    &XEXP (x, 1), opnum, type, ind_levels,
5611				    insn);
5612	  }
5613
5614	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5615		 || code1 == ZERO_EXTEND || code0 == MEM)
5616	  {
5617	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5618				    &XEXP (x, 0), opnum, type, ind_levels,
5619				    insn);
5620	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5621				    &XEXP (x, 1), opnum, type, ind_levels,
5622				    insn);
5623	  }
5624
5625	else if (code0 == CONST_INT || code0 == CONST
5626		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5627	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5628				  &XEXP (x, 1), opnum, type, ind_levels,
5629				  insn);
5630
5631	else if (code1 == CONST_INT || code1 == CONST
5632		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5633	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5634				  &XEXP (x, 0), opnum, type, ind_levels,
5635				  insn);
5636
5637	else if (code0 == REG && code1 == REG)
5638	  {
5639	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5640		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5641	      return 0;
5642	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5643		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5644	      return 0;
5645	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5646	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5647				      &XEXP (x, 1), opnum, type, ind_levels,
5648				      insn);
5649	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5650	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5651				      &XEXP (x, 0), opnum, type, ind_levels,
5652				      insn);
5653	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5654	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5655				      &XEXP (x, 0), opnum, type, ind_levels,
5656				      insn);
5657	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5658	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5659				      &XEXP (x, 1), opnum, type, ind_levels,
5660				      insn);
5661	    else
5662	      {
5663		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5664					&XEXP (x, 0), opnum, type, ind_levels,
5665					insn);
5666		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5667					&XEXP (x, 1), opnum, type, ind_levels,
5668					insn);
5669	      }
5670	  }
5671
5672	else if (code0 == REG)
5673	  {
5674	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5675				    &XEXP (x, 0), opnum, type, ind_levels,
5676				    insn);
5677	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5678				    &XEXP (x, 1), opnum, type, ind_levels,
5679				    insn);
5680	  }
5681
5682	else if (code1 == REG)
5683	  {
5684	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5685				    &XEXP (x, 1), opnum, type, ind_levels,
5686				    insn);
5687	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5688				    &XEXP (x, 0), opnum, type, ind_levels,
5689				    insn);
5690	  }
5691      }
5692
5693      return 0;
5694
5695    case POST_MODIFY:
5696    case PRE_MODIFY:
5697      {
5698	rtx op0 = XEXP (x, 0);
5699	rtx op1 = XEXP (x, 1);
5700	enum rtx_code index_code;
5701	int regno;
5702	int reloadnum;
5703
5704	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5705	  return 0;
5706
5707	/* Currently, we only support {PRE,POST}_MODIFY constructs
5708	   where a base register is {inc,dec}remented by the contents
5709	   of another register or by a constant value.  Thus, these
5710	   operands must match.  */
5711	gcc_assert (op0 == XEXP (op1, 0));
5712
5713	/* Require index register (or constant).  Let's just handle the
5714	   register case in the meantime... If the target allows
5715	   auto-modify by a constant then we could try replacing a pseudo
5716	   register with its equivalent constant where applicable.
5717
5718	   We also handle the case where the register was eliminated
5719	   resulting in a PLUS subexpression.
5720
5721	   If we later decide to reload the whole PRE_MODIFY or
5722	   POST_MODIFY, inc_for_reload might clobber the reload register
5723	   before reading the index.  The index register might therefore
5724	   need to live longer than a TYPE reload normally would, so be
5725	   conservative and class it as RELOAD_OTHER.  */
5726	if ((REG_P (XEXP (op1, 1))
5727	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5728	    || GET_CODE (XEXP (op1, 1)) == PLUS)
5729	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5730				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
5731				  ind_levels, insn);
5732
5733	gcc_assert (REG_P (XEXP (op1, 0)));
5734
5735	regno = REGNO (XEXP (op1, 0));
5736	index_code = GET_CODE (XEXP (op1, 1));
5737
5738	/* A register that is incremented cannot be constant!  */
5739	gcc_assert (regno < FIRST_PSEUDO_REGISTER
5740		    || reg_equiv_constant (regno) == 0);
5741
5742	/* Handle a register that is equivalent to a memory location
5743	    which cannot be addressed directly.  */
5744	if (reg_equiv_memory_loc (regno) != 0
5745	    && (reg_equiv_address (regno) != 0
5746		|| num_not_at_initial_offset))
5747	  {
5748	    rtx tem = make_memloc (XEXP (x, 0), regno);
5749
5750	    if (reg_equiv_address (regno)
5751		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5752	      {
5753		rtx orig = tem;
5754
5755		/* First reload the memory location's address.
5756		    We can't use ADDR_TYPE (type) here, because we need to
5757		    write back the value after reading it, hence we actually
5758		    need two registers.  */
5759		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5760				      &XEXP (tem, 0), opnum,
5761				      RELOAD_OTHER,
5762				      ind_levels, insn);
5763
5764		if (!rtx_equal_p (tem, orig))
5765		  push_reg_equiv_alt_mem (regno, tem);
5766
5767		/* Then reload the memory location into a base
5768		   register.  */
5769		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5770					 &XEXP (op1, 0),
5771					 base_reg_class (mode, as,
5772							 code, index_code),
5773					 GET_MODE (x), GET_MODE (x), 0,
5774					 0, opnum, RELOAD_OTHER);
5775
5776		update_auto_inc_notes (this_insn, regno, reloadnum);
5777		return 0;
5778	      }
5779	  }
5780
5781	if (reg_renumber[regno] >= 0)
5782	  regno = reg_renumber[regno];
5783
5784	/* We require a base register here...  */
5785	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5786	  {
5787	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5788				     &XEXP (op1, 0), &XEXP (x, 0),
5789				     base_reg_class (mode, as,
5790						     code, index_code),
5791				     GET_MODE (x), GET_MODE (x), 0, 0,
5792				     opnum, RELOAD_OTHER);
5793
5794	    update_auto_inc_notes (this_insn, regno, reloadnum);
5795	    return 0;
5796	  }
5797      }
5798      return 0;
5799
5800    case POST_INC:
5801    case POST_DEC:
5802    case PRE_INC:
5803    case PRE_DEC:
5804      if (REG_P (XEXP (x, 0)))
5805	{
5806	  int regno = REGNO (XEXP (x, 0));
5807	  int value = 0;
5808	  rtx x_orig = x;
5809
5810	  /* A register that is incremented cannot be constant!  */
5811	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
5812		      || reg_equiv_constant (regno) == 0);
5813
5814	  /* Handle a register that is equivalent to a memory location
5815	     which cannot be addressed directly.  */
5816	  if (reg_equiv_memory_loc (regno) != 0
5817	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5818	    {
5819	      rtx tem = make_memloc (XEXP (x, 0), regno);
5820	      if (reg_equiv_address (regno)
5821		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5822		{
5823		  rtx orig = tem;
5824
5825		  /* First reload the memory location's address.
5826		     We can't use ADDR_TYPE (type) here, because we need to
5827		     write back the value after reading it, hence we actually
5828		     need two registers.  */
5829		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5830					&XEXP (tem, 0), opnum, type,
5831					ind_levels, insn);
5832		  reloaded_inner_of_autoinc = true;
5833		  if (!rtx_equal_p (tem, orig))
5834		    push_reg_equiv_alt_mem (regno, tem);
5835		  /* Put this inside a new increment-expression.  */
5836		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5837		  /* Proceed to reload that, as if it contained a register.  */
5838		}
5839	    }
5840
5841	  /* If we have a hard register that is ok in this incdec context,
5842	     don't make a reload.  If the register isn't nice enough for
5843	     autoincdec, we can reload it.  But, if an autoincrement of a
5844	     register that we here verified as playing nice, still outside
5845	     isn't "valid", it must be that no autoincrement is "valid".
5846	     If that is true and something made an autoincrement anyway,
5847	     this must be a special context where one is allowed.
5848	     (For example, a "push" instruction.)
5849	     We can't improve this address, so leave it alone.  */
5850
5851	  /* Otherwise, reload the autoincrement into a suitable hard reg
5852	     and record how much to increment by.  */
5853
5854	  if (reg_renumber[regno] >= 0)
5855	    regno = reg_renumber[regno];
5856	  if (regno >= FIRST_PSEUDO_REGISTER
5857	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5858				      index_code))
5859	    {
5860	      int reloadnum;
5861
5862	      /* If we can output the register afterwards, do so, this
5863		 saves the extra update.
5864		 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5865		 CALL_INSN - and it does not set CC0.
5866		 But don't do this if we cannot directly address the
5867		 memory location, since this will make it harder to
5868		 reuse address reloads, and increases register pressure.
5869		 Also don't do this if we can probably update x directly.  */
5870	      rtx equiv = (MEM_P (XEXP (x, 0))
5871			   ? XEXP (x, 0)
5872			   : reg_equiv_mem (regno));
5873	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5874	      if (insn && NONJUMP_INSN_P (insn) && equiv
5875		  && memory_operand (equiv, GET_MODE (equiv))
5876#ifdef HAVE_cc0
5877		  && ! sets_cc0_p (PATTERN (insn))
5878#endif
5879		  && ! (icode != CODE_FOR_nothing
5880			&& insn_operand_matches (icode, 0, equiv)
5881			&& insn_operand_matches (icode, 1, equiv))
5882		  /* Using RELOAD_OTHER means we emit this and the reload we
5883		     made earlier in the wrong order.  */
5884		  && !reloaded_inner_of_autoinc)
5885		{
5886		  /* We use the original pseudo for loc, so that
5887		     emit_reload_insns() knows which pseudo this
5888		     reload refers to and updates the pseudo rtx, not
5889		     its equivalent memory location, as well as the
5890		     corresponding entry in reg_last_reload_reg.  */
5891		  loc = &XEXP (x_orig, 0);
5892		  x = XEXP (x, 0);
5893		  reloadnum
5894		    = push_reload (x, x, loc, loc,
5895				   context_reg_class,
5896				   GET_MODE (x), GET_MODE (x), 0, 0,
5897				   opnum, RELOAD_OTHER);
5898		}
5899	      else
5900		{
5901		  reloadnum
5902		    = push_reload (x, x, loc, (rtx*) 0,
5903				   context_reg_class,
5904				   GET_MODE (x), GET_MODE (x), 0, 0,
5905				   opnum, type);
5906		  rld[reloadnum].inc
5907		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5908
5909		  value = 1;
5910		}
5911
5912	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5913				     reloadnum);
5914	    }
5915	  return value;
5916	}
5917      return 0;
5918
5919    case TRUNCATE:
5920    case SIGN_EXTEND:
5921    case ZERO_EXTEND:
5922      /* Look for parts to reload in the inner expression and reload them
5923	 too, in addition to this operation.  Reloading all inner parts in
5924	 addition to this one shouldn't be necessary, but at this point,
5925	 we don't know if we can possibly omit any part that *can* be
5926	 reloaded.  Targets that are better off reloading just either part
5927	 (or perhaps even a different part of an outer expression), should
5928	 define LEGITIMIZE_RELOAD_ADDRESS.  */
5929      find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5930			      context, code, SCRATCH, &XEXP (x, 0), opnum,
5931			      type, ind_levels, insn);
5932      push_reload (x, NULL_RTX, loc, (rtx*) 0,
5933		   context_reg_class,
5934		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5935      return 1;
5936
5937    case MEM:
5938      /* This is probably the result of a substitution, by eliminate_regs, of
5939	 an equivalent address for a pseudo that was not allocated to a hard
5940	 register.  Verify that the specified address is valid and reload it
5941	 into a register.
5942
5943	 Since we know we are going to reload this item, don't decrement for
5944	 the indirection level.
5945
5946	 Note that this is actually conservative:  it would be slightly more
5947	 efficient to use the value of SPILL_INDIRECT_LEVELS from
5948	 reload1.c here.  */
5949
5950      find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5951			    opnum, ADDR_TYPE (type), ind_levels, insn);
5952      push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5953		   context_reg_class,
5954		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5955      return 1;
5956
5957    case REG:
5958      {
5959	int regno = REGNO (x);
5960
5961	if (reg_equiv_constant (regno) != 0)
5962	  {
5963	    find_reloads_address_part (reg_equiv_constant (regno), loc,
5964				       context_reg_class,
5965				       GET_MODE (x), opnum, type, ind_levels);
5966	    return 1;
5967	  }
5968
5969#if 0 /* This might screw code in reload1.c to delete prior output-reload
5970	 that feeds this insn.  */
5971	if (reg_equiv_mem (regno) != 0)
5972	  {
5973	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5974			 context_reg_class,
5975			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5976	    return 1;
5977	  }
5978#endif
5979
5980	if (reg_equiv_memory_loc (regno)
5981	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5982	  {
5983	    rtx tem = make_memloc (x, regno);
5984	    if (reg_equiv_address (regno) != 0
5985		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5986	      {
5987		x = tem;
5988		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5989				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
5990				      ind_levels, insn);
5991		if (!rtx_equal_p (x, tem))
5992		  push_reg_equiv_alt_mem (regno, x);
5993	      }
5994	  }
5995
5996	if (reg_renumber[regno] >= 0)
5997	  regno = reg_renumber[regno];
5998
5999	if (regno >= FIRST_PSEUDO_REGISTER
6000	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6001				    index_code))
6002	  {
6003	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
6004			 context_reg_class,
6005			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6006	    return 1;
6007	  }
6008
6009	/* If a register appearing in an address is the subject of a CLOBBER
6010	   in this insn, reload it into some other register to be safe.
6011	   The CLOBBER is supposed to make the register unavailable
6012	   from before this insn to after it.  */
6013	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
6014	  {
6015	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
6016			 context_reg_class,
6017			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6018	    return 1;
6019	  }
6020      }
6021      return 0;
6022
6023    case SUBREG:
6024      if (REG_P (SUBREG_REG (x)))
6025	{
6026	  /* If this is a SUBREG of a hard register and the resulting register
6027	     is of the wrong class, reload the whole SUBREG.  This avoids
6028	     needless copies if SUBREG_REG is multi-word.  */
6029	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6030	    {
6031	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6032
6033	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6034				       index_code))
6035		{
6036		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
6037			       context_reg_class,
6038			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6039		  return 1;
6040		}
6041	    }
6042	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6043	     is larger than the class size, then reload the whole SUBREG.  */
6044	  else
6045	    {
6046	      enum reg_class rclass = context_reg_class;
6047	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6048		  > reg_class_size[(int) rclass])
6049		{
6050		  /* If the inner register will be replaced by a memory
6051		     reference, we can do this only if we can replace the
6052		     whole subreg by a (narrower) memory reference.  If
6053		     this is not possible, fall through and reload just
6054		     the inner register (including address reloads).  */
6055		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6056		    {
6057		      rtx tem = find_reloads_subreg_address (x, opnum,
6058							     ADDR_TYPE (type),
6059							     ind_levels, insn,
6060							     NULL);
6061		      if (tem)
6062			{
6063			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6064				       GET_MODE (tem), VOIDmode, 0, 0,
6065				       opnum, type);
6066			  return 1;
6067			}
6068		    }
6069		  else
6070		    {
6071		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6072				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6073		      return 1;
6074		    }
6075		}
6076	    }
6077	}
6078      break;
6079
6080    default:
6081      break;
6082    }
6083
6084  {
6085    const char *fmt = GET_RTX_FORMAT (code);
6086    int i;
6087
6088    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6089      {
6090	if (fmt[i] == 'e')
6091	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6092	     we get here.  */
6093	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
6094				  code, SCRATCH, &XEXP (x, i),
6095				  opnum, type, ind_levels, insn);
6096      }
6097  }
6098
6099#undef REG_OK_FOR_CONTEXT
6100  return 0;
6101}
6102
6103/* X, which is found at *LOC, is a part of an address that needs to be
6104   reloaded into a register of class RCLASS.  If X is a constant, or if
6105   X is a PLUS that contains a constant, check that the constant is a
6106   legitimate operand and that we are supposed to be able to load
6107   it into the register.
6108
6109   If not, force the constant into memory and reload the MEM instead.
6110
6111   MODE is the mode to use, in case X is an integer constant.
6112
6113   OPNUM and TYPE describe the purpose of any reloads made.
6114
6115   IND_LEVELS says how many levels of indirect addressing this machine
6116   supports.  */
6117
6118static void
6119find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6120			   machine_mode mode, int opnum,
6121			   enum reload_type type, int ind_levels)
6122{
6123  if (CONSTANT_P (x)
6124      && (!targetm.legitimate_constant_p (mode, x)
6125	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6126    {
6127      x = force_const_mem (mode, x);
6128      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6129			    opnum, type, ind_levels, 0);
6130    }
6131
6132  else if (GET_CODE (x) == PLUS
6133	   && CONSTANT_P (XEXP (x, 1))
6134	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6135	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6136		   == NO_REGS))
6137    {
6138      rtx tem;
6139
6140      tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6141      x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6142      find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6143			    opnum, type, ind_levels, 0);
6144    }
6145
6146  push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6147	       mode, VOIDmode, 0, 0, opnum, type);
6148}
6149
6150/* X, a subreg of a pseudo, is a part of an address that needs to be
6151   reloaded, and the pseusdo is equivalent to a memory location.
6152
6153   Attempt to replace the whole subreg by a (possibly narrower or wider)
6154   memory reference.  If this is possible, return this new memory
6155   reference, and push all required address reloads.  Otherwise,
6156   return NULL.
6157
6158   OPNUM and TYPE identify the purpose of the reload.
6159
6160   IND_LEVELS says how many levels of indirect addressing are
6161   supported at this point in the address.
6162
6163   INSN, if nonzero, is the insn in which we do the reload.  It is used
6164   to determine where to put USEs for pseudos that we have to replace with
6165   stack slots.  */
6166
6167static rtx
6168find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6169			     int ind_levels, rtx_insn *insn,
6170			     int *address_reloaded)
6171{
6172  machine_mode outer_mode = GET_MODE (x);
6173  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6174  int regno = REGNO (SUBREG_REG (x));
6175  int reloaded = 0;
6176  rtx tem, orig;
6177  int offset;
6178
6179  gcc_assert (reg_equiv_memory_loc (regno) != 0);
6180
6181  /* We cannot replace the subreg with a modified memory reference if:
6182
6183     - we have a paradoxical subreg that implicitly acts as a zero or
6184       sign extension operation due to LOAD_EXTEND_OP;
6185
6186     - we have a subreg that is implicitly supposed to act on the full
6187       register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6188
6189     - the address of the equivalent memory location is mode-dependent;  or
6190
6191     - we have a paradoxical subreg and the resulting memory is not
6192       sufficiently aligned to allow access in the wider mode.
6193
6194    In addition, we choose not to perform the replacement for *any*
6195    paradoxical subreg, even if it were possible in principle.  This
6196    is to avoid generating wider memory references than necessary.
6197
6198    This corresponds to how previous versions of reload used to handle
6199    paradoxical subregs where no address reload was required.  */
6200
6201  if (paradoxical_subreg_p (x))
6202    return NULL;
6203
6204#ifdef WORD_REGISTER_OPERATIONS
6205  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6206      && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6207          == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6208    return NULL;
6209#endif
6210
6211  /* Since we don't attempt to handle paradoxical subregs, we can just
6212     call into simplify_subreg, which will handle all remaining checks
6213     for us.  */
6214  orig = make_memloc (SUBREG_REG (x), regno);
6215  offset = SUBREG_BYTE (x);
6216  tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6217  if (!tem || !MEM_P (tem))
6218    return NULL;
6219
6220  /* Now push all required address reloads, if any.  */
6221  reloaded = find_reloads_address (GET_MODE (tem), &tem,
6222				   XEXP (tem, 0), &XEXP (tem, 0),
6223				   opnum, type, ind_levels, insn);
6224  /* ??? Do we need to handle nonzero offsets somehow?  */
6225  if (!offset && !rtx_equal_p (tem, orig))
6226    push_reg_equiv_alt_mem (regno, tem);
6227
6228  /* For some processors an address may be valid in the original mode but
6229     not in a smaller mode.  For example, ARM accepts a scaled index register
6230     in SImode but not in HImode.  Note that this is only a problem if the
6231     address in reg_equiv_mem is already invalid in the new mode; other
6232     cases would be fixed by find_reloads_address as usual.
6233
6234     ??? We attempt to handle such cases here by doing an additional reload
6235     of the full address after the usual processing by find_reloads_address.
6236     Note that this may not work in the general case, but it seems to cover
6237     the cases where this situation currently occurs.  A more general fix
6238     might be to reload the *value* instead of the address, but this would
6239     not be expected by the callers of this routine as-is.
6240
6241     If find_reloads_address already completed replaced the address, there
6242     is nothing further to do.  */
6243  if (reloaded == 0
6244      && reg_equiv_mem (regno) != 0
6245      && !strict_memory_address_addr_space_p
6246		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6247		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6248    {
6249      push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6250		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6251				   MEM, SCRATCH),
6252		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6253      reloaded = 1;
6254    }
6255
6256  /* If this is not a toplevel operand, find_reloads doesn't see this
6257     substitution.  We have to emit a USE of the pseudo so that
6258     delete_output_reload can see it.  */
6259  if (replace_reloads && recog_data.operand[opnum] != x)
6260    /* We mark the USE with QImode so that we recognize it as one that
6261       can be safely deleted at the end of reload.  */
6262    PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6263	      QImode);
6264
6265  if (address_reloaded)
6266    *address_reloaded = reloaded;
6267
6268  return tem;
6269}
6270
6271/* Substitute into the current INSN the registers into which we have reloaded
6272   the things that need reloading.  The array `replacements'
6273   contains the locations of all pointers that must be changed
6274   and says what to replace them with.
6275
6276   Return the rtx that X translates into; usually X, but modified.  */
6277
6278void
6279subst_reloads (rtx_insn *insn)
6280{
6281  int i;
6282
6283  for (i = 0; i < n_replacements; i++)
6284    {
6285      struct replacement *r = &replacements[i];
6286      rtx reloadreg = rld[r->what].reg_rtx;
6287      if (reloadreg)
6288	{
6289#ifdef DEBUG_RELOAD
6290	  /* This checking takes a very long time on some platforms
6291	     causing the gcc.c-torture/compile/limits-fnargs.c test
6292	     to time out during testing.  See PR 31850.
6293
6294	     Internal consistency test.  Check that we don't modify
6295	     anything in the equivalence arrays.  Whenever something from
6296	     those arrays needs to be reloaded, it must be unshared before
6297	     being substituted into; the equivalence must not be modified.
6298	     Otherwise, if the equivalence is used after that, it will
6299	     have been modified, and the thing substituted (probably a
6300	     register) is likely overwritten and not a usable equivalence.  */
6301	  int check_regno;
6302
6303	  for (check_regno = 0; check_regno < max_regno; check_regno++)
6304	    {
6305#define CHECK_MODF(ARRAY)						\
6306	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
6307			  || !loc_mentioned_in_p (r->where,		\
6308						  (*reg_equivs)[check_regno].ARRAY))
6309
6310	      CHECK_MODF (constant);
6311	      CHECK_MODF (memory_loc);
6312	      CHECK_MODF (address);
6313	      CHECK_MODF (mem);
6314#undef CHECK_MODF
6315	    }
6316#endif /* DEBUG_RELOAD */
6317
6318	  /* If we're replacing a LABEL_REF with a register, there must
6319	     already be an indication (to e.g. flow) which label this
6320	     register refers to.  */
6321	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
6322		      || !JUMP_P (insn)
6323		      || find_reg_note (insn,
6324					REG_LABEL_OPERAND,
6325					XEXP (*r->where, 0))
6326		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6327
6328	  /* Encapsulate RELOADREG so its machine mode matches what
6329	     used to be there.  Note that gen_lowpart_common will
6330	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
6331	     will always be a REG here.  */
6332	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6333	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6334
6335	  *r->where = reloadreg;
6336	}
6337      /* If reload got no reg and isn't optional, something's wrong.  */
6338      else
6339	gcc_assert (rld[r->what].optional);
6340    }
6341}
6342
6343/* Make a copy of any replacements being done into X and move those
6344   copies to locations in Y, a copy of X.  */
6345
6346void
6347copy_replacements (rtx x, rtx y)
6348{
6349  copy_replacements_1 (&x, &y, n_replacements);
6350}
6351
6352static void
6353copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6354{
6355  int i, j;
6356  rtx x, y;
6357  struct replacement *r;
6358  enum rtx_code code;
6359  const char *fmt;
6360
6361  for (j = 0; j < orig_replacements; j++)
6362    if (replacements[j].where == px)
6363      {
6364	r = &replacements[n_replacements++];
6365	r->where = py;
6366	r->what = replacements[j].what;
6367	r->mode = replacements[j].mode;
6368      }
6369
6370  x = *px;
6371  y = *py;
6372  code = GET_CODE (x);
6373  fmt = GET_RTX_FORMAT (code);
6374
6375  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6376    {
6377      if (fmt[i] == 'e')
6378	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6379      else if (fmt[i] == 'E')
6380	for (j = XVECLEN (x, i); --j >= 0; )
6381	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6382			       orig_replacements);
6383    }
6384}
6385
6386/* Change any replacements being done to *X to be done to *Y.  */
6387
6388void
6389move_replacements (rtx *x, rtx *y)
6390{
6391  int i;
6392
6393  for (i = 0; i < n_replacements; i++)
6394    if (replacements[i].where == x)
6395      replacements[i].where = y;
6396}
6397
6398/* If LOC was scheduled to be replaced by something, return the replacement.
6399   Otherwise, return *LOC.  */
6400
6401rtx
6402find_replacement (rtx *loc)
6403{
6404  struct replacement *r;
6405
6406  for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6407    {
6408      rtx reloadreg = rld[r->what].reg_rtx;
6409
6410      if (reloadreg && r->where == loc)
6411	{
6412	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6413	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6414
6415	  return reloadreg;
6416	}
6417      else if (reloadreg && GET_CODE (*loc) == SUBREG
6418	       && r->where == &SUBREG_REG (*loc))
6419	{
6420	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6421	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6422
6423	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6424				      GET_MODE (SUBREG_REG (*loc)),
6425				      SUBREG_BYTE (*loc));
6426	}
6427    }
6428
6429  /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6430     what's inside and make a new rtl if so.  */
6431  if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6432      || GET_CODE (*loc) == MULT)
6433    {
6434      rtx x = find_replacement (&XEXP (*loc, 0));
6435      rtx y = find_replacement (&XEXP (*loc, 1));
6436
6437      if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6438	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6439    }
6440
6441  return *loc;
6442}
6443
6444/* Return nonzero if register in range [REGNO, ENDREGNO)
6445   appears either explicitly or implicitly in X
6446   other than being stored into (except for earlyclobber operands).
6447
6448   References contained within the substructure at LOC do not count.
6449   LOC may be zero, meaning don't ignore anything.
6450
6451   This is similar to refers_to_regno_p in rtlanal.c except that we
6452   look at equivalences for pseudos that didn't get hard registers.  */
6453
6454static int
6455refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6456			      rtx x, rtx *loc)
6457{
6458  int i;
6459  unsigned int r;
6460  RTX_CODE code;
6461  const char *fmt;
6462
6463  if (x == 0)
6464    return 0;
6465
6466 repeat:
6467  code = GET_CODE (x);
6468
6469  switch (code)
6470    {
6471    case REG:
6472      r = REGNO (x);
6473
6474      /* If this is a pseudo, a hard register must not have been allocated.
6475	 X must therefore either be a constant or be in memory.  */
6476      if (r >= FIRST_PSEUDO_REGISTER)
6477	{
6478	  if (reg_equiv_memory_loc (r))
6479	    return refers_to_regno_for_reload_p (regno, endregno,
6480						 reg_equiv_memory_loc (r),
6481						 (rtx*) 0);
6482
6483	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6484	  return 0;
6485	}
6486
6487      return (endregno > r
6488	      && regno < r + (r < FIRST_PSEUDO_REGISTER
6489			      ? hard_regno_nregs[r][GET_MODE (x)]
6490			      : 1));
6491
6492    case SUBREG:
6493      /* If this is a SUBREG of a hard reg, we can see exactly which
6494	 registers are being modified.  Otherwise, handle normally.  */
6495      if (REG_P (SUBREG_REG (x))
6496	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6497	{
6498	  unsigned int inner_regno = subreg_regno (x);
6499	  unsigned int inner_endregno
6500	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6501			     ? subreg_nregs (x) : 1);
6502
6503	  return endregno > inner_regno && regno < inner_endregno;
6504	}
6505      break;
6506
6507    case CLOBBER:
6508    case SET:
6509      if (&SET_DEST (x) != loc
6510	  /* Note setting a SUBREG counts as referring to the REG it is in for
6511	     a pseudo but not for hard registers since we can
6512	     treat each word individually.  */
6513	  && ((GET_CODE (SET_DEST (x)) == SUBREG
6514	       && loc != &SUBREG_REG (SET_DEST (x))
6515	       && REG_P (SUBREG_REG (SET_DEST (x)))
6516	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6517	       && refers_to_regno_for_reload_p (regno, endregno,
6518						SUBREG_REG (SET_DEST (x)),
6519						loc))
6520	      /* If the output is an earlyclobber operand, this is
6521		 a conflict.  */
6522	      || ((!REG_P (SET_DEST (x))
6523		   || earlyclobber_operand_p (SET_DEST (x)))
6524		  && refers_to_regno_for_reload_p (regno, endregno,
6525						   SET_DEST (x), loc))))
6526	return 1;
6527
6528      if (code == CLOBBER || loc == &SET_SRC (x))
6529	return 0;
6530      x = SET_SRC (x);
6531      goto repeat;
6532
6533    default:
6534      break;
6535    }
6536
6537  /* X does not match, so try its subexpressions.  */
6538
6539  fmt = GET_RTX_FORMAT (code);
6540  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6541    {
6542      if (fmt[i] == 'e' && loc != &XEXP (x, i))
6543	{
6544	  if (i == 0)
6545	    {
6546	      x = XEXP (x, 0);
6547	      goto repeat;
6548	    }
6549	  else
6550	    if (refers_to_regno_for_reload_p (regno, endregno,
6551					      XEXP (x, i), loc))
6552	      return 1;
6553	}
6554      else if (fmt[i] == 'E')
6555	{
6556	  int j;
6557	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6558	    if (loc != &XVECEXP (x, i, j)
6559		&& refers_to_regno_for_reload_p (regno, endregno,
6560						 XVECEXP (x, i, j), loc))
6561	      return 1;
6562	}
6563    }
6564  return 0;
6565}
6566
6567/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
6568   we check if any register number in X conflicts with the relevant register
6569   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
6570   contains a MEM (we don't bother checking for memory addresses that can't
6571   conflict because we expect this to be a rare case.
6572
6573   This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6574   that we look at equivalences for pseudos that didn't get hard registers.  */
6575
6576int
6577reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6578{
6579  int regno, endregno;
6580
6581  /* Overly conservative.  */
6582  if (GET_CODE (x) == STRICT_LOW_PART
6583      || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6584    x = XEXP (x, 0);
6585
6586  /* If either argument is a constant, then modifying X can not affect IN.  */
6587  if (CONSTANT_P (x) || CONSTANT_P (in))
6588    return 0;
6589  else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6590    return refers_to_mem_for_reload_p (in);
6591  else if (GET_CODE (x) == SUBREG)
6592    {
6593      regno = REGNO (SUBREG_REG (x));
6594      if (regno < FIRST_PSEUDO_REGISTER)
6595	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6596				      GET_MODE (SUBREG_REG (x)),
6597				      SUBREG_BYTE (x),
6598				      GET_MODE (x));
6599      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6600			  ? subreg_nregs (x) : 1);
6601
6602      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6603    }
6604  else if (REG_P (x))
6605    {
6606      regno = REGNO (x);
6607
6608      /* If this is a pseudo, it must not have been assigned a hard register.
6609	 Therefore, it must either be in memory or be a constant.  */
6610
6611      if (regno >= FIRST_PSEUDO_REGISTER)
6612	{
6613	  if (reg_equiv_memory_loc (regno))
6614	    return refers_to_mem_for_reload_p (in);
6615	  gcc_assert (reg_equiv_constant (regno));
6616	  return 0;
6617	}
6618
6619      endregno = END_HARD_REGNO (x);
6620
6621      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6622    }
6623  else if (MEM_P (x))
6624    return refers_to_mem_for_reload_p (in);
6625  else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6626	   || GET_CODE (x) == CC0)
6627    return reg_mentioned_p (x, in);
6628  else
6629    {
6630      gcc_assert (GET_CODE (x) == PLUS);
6631
6632      /* We actually want to know if X is mentioned somewhere inside IN.
6633	 We must not say that (plus (sp) (const_int 124)) is in
6634	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6635	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6636	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
6637      while (MEM_P (in))
6638	in = XEXP (in, 0);
6639      if (REG_P (in))
6640	return 0;
6641      else if (GET_CODE (in) == PLUS)
6642	return (rtx_equal_p (x, in)
6643		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6644		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6645      else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6646		   || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6647    }
6648
6649  gcc_unreachable ();
6650}
6651
6652/* Return nonzero if anything in X contains a MEM.  Look also for pseudo
6653   registers.  */
6654
6655static int
6656refers_to_mem_for_reload_p (rtx x)
6657{
6658  const char *fmt;
6659  int i;
6660
6661  if (MEM_P (x))
6662    return 1;
6663
6664  if (REG_P (x))
6665    return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6666	    && reg_equiv_memory_loc (REGNO (x)));
6667
6668  fmt = GET_RTX_FORMAT (GET_CODE (x));
6669  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6670    if (fmt[i] == 'e'
6671	&& (MEM_P (XEXP (x, i))
6672	    || refers_to_mem_for_reload_p (XEXP (x, i))))
6673      return 1;
6674
6675  return 0;
6676}
6677
6678/* Check the insns before INSN to see if there is a suitable register
6679   containing the same value as GOAL.
6680   If OTHER is -1, look for a register in class RCLASS.
6681   Otherwise, just see if register number OTHER shares GOAL's value.
6682
6683   Return an rtx for the register found, or zero if none is found.
6684
6685   If RELOAD_REG_P is (short *)1,
6686   we reject any hard reg that appears in reload_reg_rtx
6687   because such a hard reg is also needed coming into this insn.
6688
6689   If RELOAD_REG_P is any other nonzero value,
6690   it is a vector indexed by hard reg number
6691   and we reject any hard reg whose element in the vector is nonnegative
6692   as well as any that appears in reload_reg_rtx.
6693
6694   If GOAL is zero, then GOALREG is a register number; we look
6695   for an equivalent for that register.
6696
6697   MODE is the machine mode of the value we want an equivalence for.
6698   If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6699
6700   This function is used by jump.c as well as in the reload pass.
6701
6702   If GOAL is the sum of the stack pointer and a constant, we treat it
6703   as if it were a constant except that sp is required to be unchanging.  */
6704
6705rtx
6706find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6707		short *reload_reg_p, int goalreg, machine_mode mode)
6708{
6709  rtx_insn *p = insn;
6710  rtx goaltry, valtry, value;
6711  rtx_insn *where;
6712  rtx pat;
6713  int regno = -1;
6714  int valueno;
6715  int goal_mem = 0;
6716  int goal_const = 0;
6717  int goal_mem_addr_varies = 0;
6718  int need_stable_sp = 0;
6719  int nregs;
6720  int valuenregs;
6721  int num = 0;
6722
6723  if (goal == 0)
6724    regno = goalreg;
6725  else if (REG_P (goal))
6726    regno = REGNO (goal);
6727  else if (MEM_P (goal))
6728    {
6729      enum rtx_code code = GET_CODE (XEXP (goal, 0));
6730      if (MEM_VOLATILE_P (goal))
6731	return 0;
6732      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6733	return 0;
6734      /* An address with side effects must be reexecuted.  */
6735      switch (code)
6736	{
6737	case POST_INC:
6738	case PRE_INC:
6739	case POST_DEC:
6740	case PRE_DEC:
6741	case POST_MODIFY:
6742	case PRE_MODIFY:
6743	  return 0;
6744	default:
6745	  break;
6746	}
6747      goal_mem = 1;
6748    }
6749  else if (CONSTANT_P (goal))
6750    goal_const = 1;
6751  else if (GET_CODE (goal) == PLUS
6752	   && XEXP (goal, 0) == stack_pointer_rtx
6753	   && CONSTANT_P (XEXP (goal, 1)))
6754    goal_const = need_stable_sp = 1;
6755  else if (GET_CODE (goal) == PLUS
6756	   && XEXP (goal, 0) == frame_pointer_rtx
6757	   && CONSTANT_P (XEXP (goal, 1)))
6758    goal_const = 1;
6759  else
6760    return 0;
6761
6762  num = 0;
6763  /* Scan insns back from INSN, looking for one that copies
6764     a value into or out of GOAL.
6765     Stop and give up if we reach a label.  */
6766
6767  while (1)
6768    {
6769      p = PREV_INSN (p);
6770      if (p && DEBUG_INSN_P (p))
6771	continue;
6772      num++;
6773      if (p == 0 || LABEL_P (p)
6774	  || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6775	return 0;
6776
6777      /* Don't reuse register contents from before a setjmp-type
6778	 function call; on the second return (from the longjmp) it
6779	 might have been clobbered by a later reuse.  It doesn't
6780	 seem worthwhile to actually go and see if it is actually
6781	 reused even if that information would be readily available;
6782	 just don't reuse it across the setjmp call.  */
6783      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6784	return 0;
6785
6786      if (NONJUMP_INSN_P (p)
6787	  /* If we don't want spill regs ...  */
6788	  && (! (reload_reg_p != 0
6789		 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6790	      /* ... then ignore insns introduced by reload; they aren't
6791		 useful and can cause results in reload_as_needed to be
6792		 different from what they were when calculating the need for
6793		 spills.  If we notice an input-reload insn here, we will
6794		 reject it below, but it might hide a usable equivalent.
6795		 That makes bad code.  It may even fail: perhaps no reg was
6796		 spilled for this insn because it was assumed we would find
6797		 that equivalent.  */
6798	      || INSN_UID (p) < reload_first_uid))
6799	{
6800	  rtx tem;
6801	  pat = single_set (p);
6802
6803	  /* First check for something that sets some reg equal to GOAL.  */
6804	  if (pat != 0
6805	      && ((regno >= 0
6806		   && true_regnum (SET_SRC (pat)) == regno
6807		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6808		  ||
6809		  (regno >= 0
6810		   && true_regnum (SET_DEST (pat)) == regno
6811		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6812		  ||
6813		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6814		   /* When looking for stack pointer + const,
6815		      make sure we don't use a stack adjust.  */
6816		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6817		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6818		  || (goal_mem
6819		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6820		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6821		  || (goal_mem
6822		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6823		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6824		  /* If we are looking for a constant,
6825		     and something equivalent to that constant was copied
6826		     into a reg, we can use that reg.  */
6827		  || (goal_const && REG_NOTES (p) != 0
6828		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6829		      && ((rtx_equal_p (XEXP (tem, 0), goal)
6830			   && (valueno
6831			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
6832			  || (REG_P (SET_DEST (pat))
6833			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6834			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6835			      && CONST_INT_P (goal)
6836			      && 0 != (goaltry
6837				       = operand_subword (XEXP (tem, 0), 0, 0,
6838							  VOIDmode))
6839			      && rtx_equal_p (goal, goaltry)
6840			      && (valtry
6841				  = operand_subword (SET_DEST (pat), 0, 0,
6842						     VOIDmode))
6843			      && (valueno = true_regnum (valtry)) >= 0)))
6844		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6845							  NULL_RTX))
6846		      && REG_P (SET_DEST (pat))
6847		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6848		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6849		      && CONST_INT_P (goal)
6850		      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6851							  VOIDmode))
6852		      && rtx_equal_p (goal, goaltry)
6853		      && (valtry
6854			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6855		      && (valueno = true_regnum (valtry)) >= 0)))
6856	    {
6857	      if (other >= 0)
6858		{
6859		  if (valueno != other)
6860		    continue;
6861		}
6862	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6863		continue;
6864	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6865					  mode, valueno))
6866		continue;
6867	      value = valtry;
6868	      where = p;
6869	      break;
6870	    }
6871	}
6872    }
6873
6874  /* We found a previous insn copying GOAL into a suitable other reg VALUE
6875     (or copying VALUE into GOAL, if GOAL is also a register).
6876     Now verify that VALUE is really valid.  */
6877
6878  /* VALUENO is the register number of VALUE; a hard register.  */
6879
6880  /* Don't try to re-use something that is killed in this insn.  We want
6881     to be able to trust REG_UNUSED notes.  */
6882  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6883    return 0;
6884
6885  /* If we propose to get the value from the stack pointer or if GOAL is
6886     a MEM based on the stack pointer, we need a stable SP.  */
6887  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6888      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6889							  goal)))
6890    need_stable_sp = 1;
6891
6892  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
6893  if (GET_MODE (value) != mode)
6894    return 0;
6895
6896  /* Reject VALUE if it was loaded from GOAL
6897     and is also a register that appears in the address of GOAL.  */
6898
6899  if (goal_mem && value == SET_DEST (single_set (where))
6900      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6901				       goal, (rtx*) 0))
6902    return 0;
6903
6904  /* Reject registers that overlap GOAL.  */
6905
6906  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6907    nregs = hard_regno_nregs[regno][mode];
6908  else
6909    nregs = 1;
6910  valuenregs = hard_regno_nregs[valueno][mode];
6911
6912  if (!goal_mem && !goal_const
6913      && regno + nregs > valueno && regno < valueno + valuenregs)
6914    return 0;
6915
6916  /* Reject VALUE if it is one of the regs reserved for reloads.
6917     Reload1 knows how to reuse them anyway, and it would get
6918     confused if we allocated one without its knowledge.
6919     (Now that insns introduced by reload are ignored above,
6920     this case shouldn't happen, but I'm not positive.)  */
6921
6922  if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6923    {
6924      int i;
6925      for (i = 0; i < valuenregs; ++i)
6926	if (reload_reg_p[valueno + i] >= 0)
6927	  return 0;
6928    }
6929
6930  /* Reject VALUE if it is a register being used for an input reload
6931     even if it is not one of those reserved.  */
6932
6933  if (reload_reg_p != 0)
6934    {
6935      int i;
6936      for (i = 0; i < n_reloads; i++)
6937	if (rld[i].reg_rtx != 0 && rld[i].in)
6938	  {
6939	    int regno1 = REGNO (rld[i].reg_rtx);
6940	    int nregs1 = hard_regno_nregs[regno1]
6941					 [GET_MODE (rld[i].reg_rtx)];
6942	    if (regno1 < valueno + valuenregs
6943		&& regno1 + nregs1 > valueno)
6944	      return 0;
6945	  }
6946    }
6947
6948  if (goal_mem)
6949    /* We must treat frame pointer as varying here,
6950       since it can vary--in a nonlocal goto as generated by expand_goto.  */
6951    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6952
6953  /* Now verify that the values of GOAL and VALUE remain unaltered
6954     until INSN is reached.  */
6955
6956  p = insn;
6957  while (1)
6958    {
6959      p = PREV_INSN (p);
6960      if (p == where)
6961	return value;
6962
6963      /* Don't trust the conversion past a function call
6964	 if either of the two is in a call-clobbered register, or memory.  */
6965      if (CALL_P (p))
6966	{
6967	  int i;
6968
6969	  if (goal_mem || need_stable_sp)
6970	    return 0;
6971
6972	  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6973	    for (i = 0; i < nregs; ++i)
6974	      if (call_used_regs[regno + i]
6975		  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6976		return 0;
6977
6978	  if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6979	    for (i = 0; i < valuenregs; ++i)
6980	      if (call_used_regs[valueno + i]
6981		  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6982		return 0;
6983	}
6984
6985      if (INSN_P (p))
6986	{
6987	  pat = PATTERN (p);
6988
6989	  /* Watch out for unspec_volatile, and volatile asms.  */
6990	  if (volatile_insn_p (pat))
6991	    return 0;
6992
6993	  /* If this insn P stores in either GOAL or VALUE, return 0.
6994	     If GOAL is a memory ref and this insn writes memory, return 0.
6995	     If GOAL is a memory ref and its address is not constant,
6996	     and this insn P changes a register used in GOAL, return 0.  */
6997
6998	  if (GET_CODE (pat) == COND_EXEC)
6999	    pat = COND_EXEC_CODE (pat);
7000	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7001	    {
7002	      rtx dest = SET_DEST (pat);
7003	      while (GET_CODE (dest) == SUBREG
7004		     || GET_CODE (dest) == ZERO_EXTRACT
7005		     || GET_CODE (dest) == STRICT_LOW_PART)
7006		dest = XEXP (dest, 0);
7007	      if (REG_P (dest))
7008		{
7009		  int xregno = REGNO (dest);
7010		  int xnregs;
7011		  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7012		    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7013		  else
7014		    xnregs = 1;
7015		  if (xregno < regno + nregs && xregno + xnregs > regno)
7016		    return 0;
7017		  if (xregno < valueno + valuenregs
7018		      && xregno + xnregs > valueno)
7019		    return 0;
7020		  if (goal_mem_addr_varies
7021		      && reg_overlap_mentioned_for_reload_p (dest, goal))
7022		    return 0;
7023		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7024		    return 0;
7025		}
7026	      else if (goal_mem && MEM_P (dest)
7027		       && ! push_operand (dest, GET_MODE (dest)))
7028		return 0;
7029	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7030		       && reg_equiv_memory_loc (regno) != 0)
7031		return 0;
7032	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7033		return 0;
7034	    }
7035	  else if (GET_CODE (pat) == PARALLEL)
7036	    {
7037	      int i;
7038	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7039		{
7040		  rtx v1 = XVECEXP (pat, 0, i);
7041		  if (GET_CODE (v1) == COND_EXEC)
7042		    v1 = COND_EXEC_CODE (v1);
7043		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7044		    {
7045		      rtx dest = SET_DEST (v1);
7046		      while (GET_CODE (dest) == SUBREG
7047			     || GET_CODE (dest) == ZERO_EXTRACT
7048			     || GET_CODE (dest) == STRICT_LOW_PART)
7049			dest = XEXP (dest, 0);
7050		      if (REG_P (dest))
7051			{
7052			  int xregno = REGNO (dest);
7053			  int xnregs;
7054			  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7055			    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7056			  else
7057			    xnregs = 1;
7058			  if (xregno < regno + nregs
7059			      && xregno + xnregs > regno)
7060			    return 0;
7061			  if (xregno < valueno + valuenregs
7062			      && xregno + xnregs > valueno)
7063			    return 0;
7064			  if (goal_mem_addr_varies
7065			      && reg_overlap_mentioned_for_reload_p (dest,
7066								     goal))
7067			    return 0;
7068			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7069			    return 0;
7070			}
7071		      else if (goal_mem && MEM_P (dest)
7072			       && ! push_operand (dest, GET_MODE (dest)))
7073			return 0;
7074		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7075			       && reg_equiv_memory_loc (regno) != 0)
7076			return 0;
7077		      else if (need_stable_sp
7078			       && push_operand (dest, GET_MODE (dest)))
7079			return 0;
7080		    }
7081		}
7082	    }
7083
7084	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7085	    {
7086	      rtx link;
7087
7088	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7089		   link = XEXP (link, 1))
7090		{
7091		  pat = XEXP (link, 0);
7092		  if (GET_CODE (pat) == CLOBBER)
7093		    {
7094		      rtx dest = SET_DEST (pat);
7095
7096		      if (REG_P (dest))
7097			{
7098			  int xregno = REGNO (dest);
7099			  int xnregs
7100			    = hard_regno_nregs[xregno][GET_MODE (dest)];
7101
7102			  if (xregno < regno + nregs
7103			      && xregno + xnregs > regno)
7104			    return 0;
7105			  else if (xregno < valueno + valuenregs
7106				   && xregno + xnregs > valueno)
7107			    return 0;
7108			  else if (goal_mem_addr_varies
7109				   && reg_overlap_mentioned_for_reload_p (dest,
7110								     goal))
7111			    return 0;
7112			}
7113
7114		      else if (goal_mem && MEM_P (dest)
7115			       && ! push_operand (dest, GET_MODE (dest)))
7116			return 0;
7117		      else if (need_stable_sp
7118			       && push_operand (dest, GET_MODE (dest)))
7119			return 0;
7120		    }
7121		}
7122	    }
7123
7124#ifdef AUTO_INC_DEC
7125	  /* If this insn auto-increments or auto-decrements
7126	     either regno or valueno, return 0 now.
7127	     If GOAL is a memory ref and its address is not constant,
7128	     and this insn P increments a register used in GOAL, return 0.  */
7129	  {
7130	    rtx link;
7131
7132	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7133	      if (REG_NOTE_KIND (link) == REG_INC
7134		  && REG_P (XEXP (link, 0)))
7135		{
7136		  int incno = REGNO (XEXP (link, 0));
7137		  if (incno < regno + nregs && incno >= regno)
7138		    return 0;
7139		  if (incno < valueno + valuenregs && incno >= valueno)
7140		    return 0;
7141		  if (goal_mem_addr_varies
7142		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7143							     goal))
7144		    return 0;
7145		}
7146	  }
7147#endif
7148	}
7149    }
7150}
7151
7152/* Find a place where INCED appears in an increment or decrement operator
7153   within X, and return the amount INCED is incremented or decremented by.
7154   The value is always positive.  */
7155
7156static int
7157find_inc_amount (rtx x, rtx inced)
7158{
7159  enum rtx_code code = GET_CODE (x);
7160  const char *fmt;
7161  int i;
7162
7163  if (code == MEM)
7164    {
7165      rtx addr = XEXP (x, 0);
7166      if ((GET_CODE (addr) == PRE_DEC
7167	   || GET_CODE (addr) == POST_DEC
7168	   || GET_CODE (addr) == PRE_INC
7169	   || GET_CODE (addr) == POST_INC)
7170	  && XEXP (addr, 0) == inced)
7171	return GET_MODE_SIZE (GET_MODE (x));
7172      else if ((GET_CODE (addr) == PRE_MODIFY
7173		|| GET_CODE (addr) == POST_MODIFY)
7174	       && GET_CODE (XEXP (addr, 1)) == PLUS
7175	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7176	       && XEXP (addr, 0) == inced
7177	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7178	{
7179	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
7180	  return i < 0 ? -i : i;
7181	}
7182    }
7183
7184  fmt = GET_RTX_FORMAT (code);
7185  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7186    {
7187      if (fmt[i] == 'e')
7188	{
7189	  int tem = find_inc_amount (XEXP (x, i), inced);
7190	  if (tem != 0)
7191	    return tem;
7192	}
7193      if (fmt[i] == 'E')
7194	{
7195	  int j;
7196	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7197	    {
7198	      int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7199	      if (tem != 0)
7200		return tem;
7201	    }
7202	}
7203    }
7204
7205  return 0;
7206}
7207
7208/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7209   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7210
7211#ifdef AUTO_INC_DEC
7212static int
7213reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7214			   rtx insn)
7215{
7216  rtx link;
7217
7218  gcc_assert (insn);
7219
7220  if (! INSN_P (insn))
7221    return 0;
7222
7223  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7224    if (REG_NOTE_KIND (link) == REG_INC)
7225      {
7226	unsigned int test = (int) REGNO (XEXP (link, 0));
7227	if (test >= regno && test < endregno)
7228	  return 1;
7229      }
7230  return 0;
7231}
7232#else
7233
7234#define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7235
7236#endif
7237
7238/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7239   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
7240   REG_INC.  REGNO must refer to a hard register.  */
7241
7242int
7243regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7244		   int sets)
7245{
7246  unsigned int nregs, endregno;
7247
7248  /* regno must be a hard register.  */
7249  gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7250
7251  nregs = hard_regno_nregs[regno][mode];
7252  endregno = regno + nregs;
7253
7254  if ((GET_CODE (PATTERN (insn)) == CLOBBER
7255       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7256      && REG_P (XEXP (PATTERN (insn), 0)))
7257    {
7258      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7259
7260      return test >= regno && test < endregno;
7261    }
7262
7263  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7264    return 1;
7265
7266  if (GET_CODE (PATTERN (insn)) == PARALLEL)
7267    {
7268      int i = XVECLEN (PATTERN (insn), 0) - 1;
7269
7270      for (; i >= 0; i--)
7271	{
7272	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7273	  if ((GET_CODE (elt) == CLOBBER
7274	       || (sets == 1 && GET_CODE (elt) == SET))
7275	      && REG_P (XEXP (elt, 0)))
7276	    {
7277	      unsigned int test = REGNO (XEXP (elt, 0));
7278
7279	      if (test >= regno && test < endregno)
7280		return 1;
7281	    }
7282	  if (sets == 2
7283	      && reg_inc_found_and_valid_p (regno, endregno, elt))
7284	    return 1;
7285	}
7286    }
7287
7288  return 0;
7289}
7290
7291/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
7292rtx
7293reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7294{
7295  int regno;
7296
7297  if (GET_MODE (reloadreg) == mode)
7298    return reloadreg;
7299
7300  regno = REGNO (reloadreg);
7301
7302  if (REG_WORDS_BIG_ENDIAN)
7303    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7304      - (int) hard_regno_nregs[regno][mode];
7305
7306  return gen_rtx_REG (mode, regno);
7307}
7308
7309static const char *const reload_when_needed_name[] =
7310{
7311  "RELOAD_FOR_INPUT",
7312  "RELOAD_FOR_OUTPUT",
7313  "RELOAD_FOR_INSN",
7314  "RELOAD_FOR_INPUT_ADDRESS",
7315  "RELOAD_FOR_INPADDR_ADDRESS",
7316  "RELOAD_FOR_OUTPUT_ADDRESS",
7317  "RELOAD_FOR_OUTADDR_ADDRESS",
7318  "RELOAD_FOR_OPERAND_ADDRESS",
7319  "RELOAD_FOR_OPADDR_ADDR",
7320  "RELOAD_OTHER",
7321  "RELOAD_FOR_OTHER_ADDRESS"
7322};
7323
7324/* These functions are used to print the variables set by 'find_reloads' */
7325
7326DEBUG_FUNCTION void
7327debug_reload_to_stream (FILE *f)
7328{
7329  int r;
7330  const char *prefix;
7331
7332  if (! f)
7333    f = stderr;
7334  for (r = 0; r < n_reloads; r++)
7335    {
7336      fprintf (f, "Reload %d: ", r);
7337
7338      if (rld[r].in != 0)
7339	{
7340	  fprintf (f, "reload_in (%s) = ",
7341		   GET_MODE_NAME (rld[r].inmode));
7342	  print_inline_rtx (f, rld[r].in, 24);
7343	  fprintf (f, "\n\t");
7344	}
7345
7346      if (rld[r].out != 0)
7347	{
7348	  fprintf (f, "reload_out (%s) = ",
7349		   GET_MODE_NAME (rld[r].outmode));
7350	  print_inline_rtx (f, rld[r].out, 24);
7351	  fprintf (f, "\n\t");
7352	}
7353
7354      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7355
7356      fprintf (f, "%s (opnum = %d)",
7357	       reload_when_needed_name[(int) rld[r].when_needed],
7358	       rld[r].opnum);
7359
7360      if (rld[r].optional)
7361	fprintf (f, ", optional");
7362
7363      if (rld[r].nongroup)
7364	fprintf (f, ", nongroup");
7365
7366      if (rld[r].inc != 0)
7367	fprintf (f, ", inc by %d", rld[r].inc);
7368
7369      if (rld[r].nocombine)
7370	fprintf (f, ", can't combine");
7371
7372      if (rld[r].secondary_p)
7373	fprintf (f, ", secondary_reload_p");
7374
7375      if (rld[r].in_reg != 0)
7376	{
7377	  fprintf (f, "\n\treload_in_reg: ");
7378	  print_inline_rtx (f, rld[r].in_reg, 24);
7379	}
7380
7381      if (rld[r].out_reg != 0)
7382	{
7383	  fprintf (f, "\n\treload_out_reg: ");
7384	  print_inline_rtx (f, rld[r].out_reg, 24);
7385	}
7386
7387      if (rld[r].reg_rtx != 0)
7388	{
7389	  fprintf (f, "\n\treload_reg_rtx: ");
7390	  print_inline_rtx (f, rld[r].reg_rtx, 24);
7391	}
7392
7393      prefix = "\n\t";
7394      if (rld[r].secondary_in_reload != -1)
7395	{
7396	  fprintf (f, "%ssecondary_in_reload = %d",
7397		   prefix, rld[r].secondary_in_reload);
7398	  prefix = ", ";
7399	}
7400
7401      if (rld[r].secondary_out_reload != -1)
7402	fprintf (f, "%ssecondary_out_reload = %d\n",
7403		 prefix, rld[r].secondary_out_reload);
7404
7405      prefix = "\n\t";
7406      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7407	{
7408	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
7409		   insn_data[rld[r].secondary_in_icode].name);
7410	  prefix = ", ";
7411	}
7412
7413      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7414	fprintf (f, "%ssecondary_out_icode = %s", prefix,
7415		 insn_data[rld[r].secondary_out_icode].name);
7416
7417      fprintf (f, "\n");
7418    }
7419}
7420
7421DEBUG_FUNCTION void
7422debug_reload (void)
7423{
7424  debug_reload_to_stream (stderr);
7425}
7426