1/* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24
25#include "machmode.h"
26#include "hard-reg-set.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "obstack.h"
30#include "insn-config.h"
31#include "flags.h"
32#include "function.h"
33#include "expr.h"
34#include "optabs.h"
35#include "regs.h"
36#include "basic-block.h"
37#include "reload.h"
38#include "recog.h"
39#include "output.h"
40#include "cselib.h"
41#include "real.h"
42#include "toplev.h"
43#include "except.h"
44#include "tree.h"
45
46/* This file contains the reload pass of the compiler, which is
47 run after register allocation has been done. It checks that
48 each insn is valid (operands required to be in registers really
49 are in registers of the proper class) and fixes up invalid ones
50 by copying values temporarily into registers for the insns
51 that need them.
52
53 The results of register allocation are described by the vector
54 reg_renumber; the insns still contain pseudo regs, but reg_renumber
55 can be used to find which hard reg, if any, a pseudo reg is in.
56
57 The technique we always use is to free up a few hard regs that are
58 called ``reload regs'', and for each place where a pseudo reg
59 must be in a hard reg, copy it temporarily into one of the reload regs.
60
61 Reload regs are allocated locally for every instruction that needs
62 reloads. When there are pseudos which are allocated to a register that
63 has been chosen as a reload reg, such pseudos must be ``spilled''.
64 This means that they go to other hard regs, or to stack slots if no other
65 available hard regs can be found. Spilling can invalidate more
66 insns, creating additional reload needs, so we must keep checking
67 until the process stabilizes.
68
69 For machines with different classes of registers, we must keep track
70 of the register class needed for each reload, and make sure that
71 we allocate enough reload registers of each class.
72
73 The file reload.c contains the code that checks one insn for
74 validity and reports the reloads that it needs. This file
75 is in charge of scanning the entire rtl code, accumulating the
76 reload needs, spilling, assigning reload registers to use for
77 fixing up each insn, and generating the new insns to copy values
78 into the reload registers. */
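
/* Schematically, the process described above looks roughly like the
   sketch below.  This is only a simplified illustration of the flow;
   the real driver is reload () further down, and the per-insn analysis
   is done by find_reloads () in reload.c:

     do
       {
         set_initial_elim_offsets ();          -- elimination bookkeeping
         calculate_needs_all_insns (global);   -- find_reloads () per insn
         select_reload_regs ();                -- pick reload regs, spill pseudos
       }
     while (something changed);
     reload_as_needed (global);                -- emit the actual reload insns
*/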
79
80#ifndef REGISTER_MOVE_COST
81#define REGISTER_MOVE_COST(m, x, y) 2
82#endif
83
84#ifndef LOCAL_REGNO
85#define LOCAL_REGNO(REGNO) 0
86#endif
87
88/* During reload_as_needed, element N contains a REG rtx for the hard reg
89 into which reg N has been reloaded (perhaps for a previous insn). */
90static rtx *reg_last_reload_reg;
91
92/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
93 for an output reload that stores into reg N. */
94static char *reg_has_output_reload;
95
96/* Indicates which hard regs are reload-registers for an output reload
97 in the current insn. */
98static HARD_REG_SET reg_is_output_reload;
99
100/* Element N is the constant value to which pseudo reg N is equivalent,
101 or zero if pseudo reg N is not equivalent to a constant.
102 find_reloads looks at this in order to replace pseudo reg N
103 with the constant it stands for. */
104rtx *reg_equiv_constant;
105
106/* Element N is a memory location to which pseudo reg N is equivalent,
107 prior to any register elimination (such as frame pointer to stack
108 pointer). Depending on whether or not it is a valid address, this value
109 is transferred to either reg_equiv_address or reg_equiv_mem. */
110rtx *reg_equiv_memory_loc;
111
112/* Element N is the address of stack slot to which pseudo reg N is equivalent.
113 This is used when the address is not valid as a memory address
114 (because its displacement is too big for the machine.) */
115rtx *reg_equiv_address;
116
117/* Element N is the memory slot to which pseudo reg N is equivalent,
118 or zero if pseudo reg N is not equivalent to a memory slot. */
119rtx *reg_equiv_mem;
120
121/* Widest width in which each pseudo reg is referred to (via subreg). */
122static unsigned int *reg_max_ref_width;
123
124/* Element N is the list of insns that initialized reg N from its equivalent
125 constant or memory slot. */
126static rtx *reg_equiv_init;
127
128/* Vector to remember old contents of reg_renumber before spilling. */
129static short *reg_old_renumber;
130
131/* During reload_as_needed, element N contains the last pseudo regno reloaded
132 into hard register N. If that pseudo reg occupied more than one register,
133 reg_reloaded_contents points to that pseudo for each spill register in
134 use; all of these must remain set for an inheritance to occur. */
135static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
136
137/* During reload_as_needed, element N contains the insn for which
138 hard register N was last used. Its contents are significant only
139 when reg_reloaded_valid is set for this register. */
140static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
141
142/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
143static HARD_REG_SET reg_reloaded_valid;
144/* Indicate if the register was dead at the end of the reload.
145 This is only valid if reg_reloaded_contents is set and valid. */
146static HARD_REG_SET reg_reloaded_dead;
147
148/* Number of spill-regs so far; number of valid elements of spill_regs. */
149static int n_spills;
150
151/* In parallel with spill_regs, contains REG rtx's for those regs.
152 Holds the last rtx used for any given reg, or 0 if it has never
153 been used for spilling yet. This rtx is reused, provided it has
154 the proper mode. */
155static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
156
157/* In parallel with spill_regs, contains nonzero for a spill reg
158 that was stored after the last time it was used.
159 The precise value is the insn generated to do the store. */
160static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
161
162/* This is the register that was stored with spill_reg_store. This is a
163 copy of reload_out / reload_out_reg when the value was stored; if
164 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
165static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
166
167/* This table is the inverse mapping of spill_regs:
168 indexed by hard reg number,
169 it contains the position of that reg in spill_regs,
170 or -1 for something that is not in spill_regs.
171
172 ?!? This is no longer accurate. */
173static short spill_reg_order[FIRST_PSEUDO_REGISTER];
174
175/* This reg set indicates registers that can't be used as spill registers for
176 the currently processed insn. These are the hard registers which are live
177 during the insn, but not allocated to pseudos, as well as fixed
178 registers. */
179static HARD_REG_SET bad_spill_regs;
180
181/* These are the hard registers that can't be used as spill register for any
182 insn. This includes registers used for user variables and registers that
183 we can't eliminate. A register that appears in this set also can't be used
184 to retry register allocation. */
185static HARD_REG_SET bad_spill_regs_global;
186
187/* Describes order of use of registers for reloading
188 of spilled pseudo-registers. `n_spills' is the number of
189 elements that are actually valid; new ones are added at the end.
190
191 Both spill_regs and spill_reg_order are used on two occasions:
192 once during find_reload_regs, where they keep track of the spill registers
193 for a single insn, but also during reload_as_needed where they show all
194 the registers ever used by reload. For the latter case, the information
195 is calculated during finish_spills. */
196static short spill_regs[FIRST_PSEUDO_REGISTER];
197
198/* This vector of reg sets indicates, for each pseudo, which hard registers
199 may not be used for retrying global allocation because the register was
200 formerly spilled from one of them. If we allowed reallocating a pseudo to
201 a register that it was already allocated to, reload might not
202 terminate. */
203static HARD_REG_SET *pseudo_previous_regs;
204
205/* This vector of reg sets indicates, for each pseudo, which hard
206 registers may not be used for retrying global allocation because they
207 are used as spill registers during one of the insns in which the
208 pseudo is live. */
209static HARD_REG_SET *pseudo_forbidden_regs;
210
211/* All hard regs that have been used as spill registers for any insn are
212 marked in this set. */
213static HARD_REG_SET used_spill_regs;
214
215/* Index of last register assigned as a spill register. We allocate in
216 a round-robin fashion. */
217static int last_spill_reg;
218
219/* Nonzero if indirect addressing is supported on the machine; this means
220 that spilling (REG n) does not require reloading it into a register in
221 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
222 value indicates the level of indirect addressing supported, e.g., two
223 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
224 a hard register. */
225static char spill_indirect_levels;
226
227/* Nonzero if indirect addressing is supported when the innermost MEM is
228 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
229 which these are valid is the same as spill_indirect_levels, above. */
230char indirect_symref_ok;
231
232/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
233char double_reg_address_ok;
234
235/* Record the stack slot for each spilled hard register. */
236static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
237
238/* Width allocated so far for that stack slot. */
239static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240
241/* Record which pseudos needed to be spilled. */
242static regset_head spilled_pseudos;
243
244/* Used for communication between order_regs_for_reload and count_pseudo.
245 Used to avoid counting one pseudo twice. */
246static regset_head pseudos_counted;
247
248/* First uid used by insns created by reload in this function.
249 Used in find_equiv_reg. */
250int reload_first_uid;
251
252/* Flag set by local-alloc or global-alloc if anything is live in
253 a call-clobbered reg across calls. */
254int caller_save_needed;
255
256/* Set to 1 while reload_as_needed is operating.
257 Required by some machines to handle any generated moves differently. */
258int reload_in_progress = 0;
259
260/* These arrays record the insn_code of insns that may be needed to
261 perform input and output reloads of special objects. They provide a
262 place to pass a scratch register. */
263enum insn_code reload_in_optab[NUM_MACHINE_MODES];
264enum insn_code reload_out_optab[NUM_MACHINE_MODES];
265
266/* This obstack is used for allocation of rtl during register elimination.
267 The allocated storage can be freed once find_reloads has processed the
268 insn. */
269struct obstack reload_obstack;
270
271/* Points to the beginning of the reload_obstack. All insn_chain structures
272 are allocated first. */
273char *reload_startobj;
274
275/* The point after all insn_chain structures. Used to quickly deallocate
276 memory allocated in copy_reloads during calculate_needs_all_insns. */
277char *reload_firstobj;
278
279/* This points before all local rtl generated by register elimination.
280 Used to quickly free all memory after processing one insn. */
281static char *reload_insn_firstobj;
282
283#define obstack_chunk_alloc xmalloc
284#define obstack_chunk_free free
285
286/* List of insn_chain instructions, one for every insn that reload needs to
287 examine. */
288struct insn_chain *reload_insn_chain;
289
290#ifdef TREE_CODE
291extern tree current_function_decl;
292#else
293extern union tree_node *current_function_decl;
294#endif
295
296/* List of all insns needing reloads. */
297static struct insn_chain *insns_need_reload;
298
299/* This structure is used to record information about register eliminations.
300 Each array entry describes one possible way of eliminating a register
301 in favor of another. If there is more than one way of eliminating a
302 particular register, the most preferred should be specified first. */
303
304struct elim_table
305{
306 int from; /* Register number to be eliminated. */
307 int to; /* Register number used as replacement. */
308 int initial_offset; /* Initial difference between values. */
309 int can_eliminate; /* Non-zero if this elimination can be done. */
310 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
311 insns made by reload. */
312 int offset; /* Current offset between the two regs. */
313 int previous_offset; /* Offset at end of previous insn. */
314 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
315 rtx from_rtx; /* REG rtx for the register to be eliminated.
316 We cannot simply compare the number since
317 we might then spuriously replace a hard
318 register corresponding to a pseudo
319 assigned to the reg to be eliminated. */
320 rtx to_rtx; /* REG rtx for the replacement. */
321};
322
323static struct elim_table *reg_eliminate = 0;
324
325/* This is an intermediate structure to initialize the table. It has
326 exactly the members provided by ELIMINABLE_REGS. */
327static const struct elim_table_1
328{
329 const int from;
330 const int to;
331} reg_eliminate_1[] =
332
333/* If a set of eliminable registers was specified, define the table from it.
334 Otherwise, default to the normal case of the frame pointer being
335 replaced by the stack pointer. */
336
337#ifdef ELIMINABLE_REGS
338 ELIMINABLE_REGS;
339#else
340 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
341#endif
342
343#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
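/* For illustration only: a target's ELIMINABLE_REGS is just a brace
   initializer matching struct elim_table_1 above, for example
   (a hypothetical definition, not taken from any particular port):

     #define ELIMINABLE_REGS                                   \
       {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },        \
        { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },   \
        { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },        \
        { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   Entries eliminating the same register must list the most preferred
   replacement first, as noted above.  */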
344
345/* Record the number of pending eliminations that have an offset not equal
346 to their initial offset. If non-zero, we use a new copy of each
347 replacement result in any insns encountered. */
348int num_not_at_initial_offset;
349
350/* Count the number of registers that we may be able to eliminate. */
351static int num_eliminable;
352/* And the number of registers that are equivalent to a constant that
353 can be eliminated to frame_pointer / arg_pointer + constant. */
354static int num_eliminable_invariants;
355
356/* For each label, we record the offset of each elimination. If we reach
357 a label by more than one path and an offset differs, we cannot do the
358 elimination. This information is indexed by the number of the label.
359 The first table is an array of flags that records whether we have yet
360 encountered a label and the second table is an array of arrays, one
361 entry in the latter array for each elimination. */
362
363static char *offsets_known_at;
364static int (*offsets_at)[NUM_ELIMINABLE_REGS];
365
366/* Number of labels in the current function. */
367
368static int num_labels;
369
370static void replace_pseudos_in_call_usage PARAMS ((rtx *,
371 enum machine_mode,
372 rtx));
373static void maybe_fix_stack_asms PARAMS ((void));
374static void copy_reloads PARAMS ((struct insn_chain *));
375static void calculate_needs_all_insns PARAMS ((int));
376static int find_reg PARAMS ((struct insn_chain *, int));
377static void find_reload_regs PARAMS ((struct insn_chain *));
378static void select_reload_regs PARAMS ((void));
379static void delete_caller_save_insns PARAMS ((void));
380
381static void spill_failure PARAMS ((rtx, enum reg_class));
382static void count_spilled_pseudo PARAMS ((int, int, int));
383static void delete_dead_insn PARAMS ((rtx));
384static void alter_reg PARAMS ((int, int));
385static void set_label_offsets PARAMS ((rtx, rtx, int));
386static void check_eliminable_occurrences PARAMS ((rtx));
387static void elimination_effects PARAMS ((rtx, enum machine_mode));
388static int eliminate_regs_in_insn PARAMS ((rtx, int));
389static void update_eliminable_offsets PARAMS ((void));
390static void mark_not_eliminable PARAMS ((rtx, rtx, void *));
391static void set_initial_elim_offsets PARAMS ((void));
392static void verify_initial_elim_offsets PARAMS ((void));
393static void set_initial_label_offsets PARAMS ((void));
394static void set_offsets_for_label PARAMS ((rtx));
395static void init_elim_table PARAMS ((void));
396static void update_eliminables PARAMS ((HARD_REG_SET *));
397static void spill_hard_reg PARAMS ((unsigned int, int));
398static int finish_spills PARAMS ((int));
399static void ior_hard_reg_set PARAMS ((HARD_REG_SET *, HARD_REG_SET *));
400static void scan_paradoxical_subregs PARAMS ((rtx));
401static void count_pseudo PARAMS ((int));
402static void order_regs_for_reload PARAMS ((struct insn_chain *));
403static void reload_as_needed PARAMS ((int));
404static void forget_old_reloads_1 PARAMS ((rtx, rtx, void *));
405static int reload_reg_class_lower PARAMS ((const PTR, const PTR));
406static void mark_reload_reg_in_use PARAMS ((unsigned int, int,
407 enum reload_type,
408 enum machine_mode));
409static void clear_reload_reg_in_use PARAMS ((unsigned int, int,
410 enum reload_type,
411 enum machine_mode));
412static int reload_reg_free_p PARAMS ((unsigned int, int,
413 enum reload_type));
414static int reload_reg_free_for_value_p PARAMS ((int, int, int,
415 enum reload_type,
416 rtx, rtx, int, int));
417static int free_for_value_p PARAMS ((int, enum machine_mode, int,
418 enum reload_type, rtx, rtx,
419 int, int));
420static int reload_reg_reaches_end_p PARAMS ((unsigned int, int,
421 enum reload_type));
422static int allocate_reload_reg PARAMS ((struct insn_chain *, int,
423 int));
424static int conflicts_with_override PARAMS ((rtx));
425static void failed_reload PARAMS ((rtx, int));
426static int set_reload_reg PARAMS ((int, int));
427static void choose_reload_regs_init PARAMS ((struct insn_chain *, rtx *));
428static void choose_reload_regs PARAMS ((struct insn_chain *));
429static void merge_assigned_reloads PARAMS ((rtx));
430static void emit_input_reload_insns PARAMS ((struct insn_chain *,
431 struct reload *, rtx, int));
432static void emit_output_reload_insns PARAMS ((struct insn_chain *,
433 struct reload *, int));
434static void do_input_reload PARAMS ((struct insn_chain *,
435 struct reload *, int));
436static void do_output_reload PARAMS ((struct insn_chain *,
437 struct reload *, int));
438static void emit_reload_insns PARAMS ((struct insn_chain *));
439static void delete_output_reload PARAMS ((rtx, int, int));
440static void delete_address_reloads PARAMS ((rtx, rtx));
441static void delete_address_reloads_1 PARAMS ((rtx, rtx, rtx));
442static rtx inc_for_reload PARAMS ((rtx, rtx, rtx, int));
443static int constraint_accepts_reg_p PARAMS ((const char *, rtx));
444static void reload_cse_regs_1 PARAMS ((rtx));
445static int reload_cse_noop_set_p PARAMS ((rtx));
446static int reload_cse_simplify_set PARAMS ((rtx, rtx));
447static int reload_cse_simplify_operands PARAMS ((rtx));
448static void reload_combine PARAMS ((void));
449static void reload_combine_note_use PARAMS ((rtx *, rtx));
450static void reload_combine_note_store PARAMS ((rtx, rtx, void *));
451static void reload_cse_move2add PARAMS ((rtx));
452static void move2add_note_store PARAMS ((rtx, rtx, void *));
453#ifdef AUTO_INC_DEC
454static void add_auto_inc_notes PARAMS ((rtx, rtx));
455#endif
456static void copy_eh_notes PARAMS ((rtx, rtx));
457static HOST_WIDE_INT sext_for_mode PARAMS ((enum machine_mode,
458 HOST_WIDE_INT));
459static void failed_reload PARAMS ((rtx, int));
460static int set_reload_reg PARAMS ((int, int));
461static void reload_cse_delete_noop_set PARAMS ((rtx, rtx));
462static void reload_cse_simplify PARAMS ((rtx));
463void fixup_abnormal_edges PARAMS ((void));
464extern void dump_needs PARAMS ((struct insn_chain *));
465
466/* Initialize the reload pass once per compilation. */
467
468void
469init_reload ()
470{
471 int i;
472
473 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
474 Set spill_indirect_levels to the number of levels such addressing is
475 permitted, zero if it is not permitted at all. */
476
477 rtx tem
478 = gen_rtx_MEM (Pmode,
479 gen_rtx_PLUS (Pmode,
480 gen_rtx_REG (Pmode,
481 LAST_VIRTUAL_REGISTER + 1),
482 GEN_INT (4)));
483 spill_indirect_levels = 0;
484
485 while (memory_address_p (QImode, tem))
486 {
487 spill_indirect_levels++;
488 tem = gen_rtx_MEM (Pmode, tem);
489 }
490
491 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
492
493 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
494 indirect_symref_ok = memory_address_p (QImode, tem);
495
496 /* See if reg+reg is a valid (and offsettable) address. */
497
498 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
499 {
500 tem = gen_rtx_PLUS (Pmode,
501 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
502 gen_rtx_REG (Pmode, i));
503
504 /* This way, we make sure that reg+reg is an offsettable address. */
505 tem = plus_constant (tem, 4);
506
507 if (memory_address_p (QImode, tem))
508 {
509 double_reg_address_ok = 1;
510 break;
511 }
512 }
513
514 /* Initialize obstack for our rtl allocation. */
515 gcc_obstack_init (&reload_obstack);
516 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
517
518 INIT_REG_SET (&spilled_pseudos);
519 INIT_REG_SET (&pseudos_counted);
520}
521
522/* List of insn chains that are currently unused. */
523static struct insn_chain *unused_insn_chains = 0;
524
525/* Allocate an empty insn_chain structure. */
526struct insn_chain *
527new_insn_chain ()
528{
529 struct insn_chain *c;
530
531 if (unused_insn_chains == 0)
532 {
533 c = (struct insn_chain *)
534 obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
535 INIT_REG_SET (&c->live_throughout);
536 INIT_REG_SET (&c->dead_or_set);
537 }
538 else
539 {
540 c = unused_insn_chains;
541 unused_insn_chains = c->next;
542 }
543 c->is_caller_save_insn = 0;
544 c->need_operand_change = 0;
545 c->need_reload = 0;
546 c->need_elim = 0;
547 return c;
548}
549
550/* Small utility function to set all regs in hard reg set TO which are
551 allocated to pseudos in regset FROM. */
552
553void
554compute_use_by_pseudos (to, from)
555 HARD_REG_SET *to;
556 regset from;
557{
558 unsigned int regno;
559
560 EXECUTE_IF_SET_IN_REG_SET
561 (from, FIRST_PSEUDO_REGISTER, regno,
562 {
563 int r = reg_renumber[regno];
564 int nregs;
565
566 if (r < 0)
567 {
568 /* reload_combine uses the information from
569 BASIC_BLOCK->global_live_at_start, which might still
570 contain registers that have not actually been allocated
571 since they have an equivalence. */
572 if (! reload_completed)
573 abort ();
574 }
575 else
576 {
577 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
578 while (nregs-- > 0)
579 SET_HARD_REG_BIT (*to, r + nregs);
580 }
581 });
582}
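
/* A minimal (hypothetical) usage sketch for compute_use_by_pseudos:
   the function only ORs bits into TO, so callers are expected to
   initialize the set themselves, e.g.

     HARD_REG_SET used_by_pseudos;
     CLEAR_HARD_REG_SET (used_by_pseudos);
     compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);

   after which used_by_pseudos holds every hard reg occupied by a pseudo
   that is live throughout the insn described by CHAIN.  */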
583
584/* Replace all pseudos found in LOC with their corresponding
585 equivalences. */
586
587static void
588replace_pseudos_in_call_usage (loc, mem_mode, usage)
589 rtx *loc;
590 enum machine_mode mem_mode;
591 rtx usage;
592{
593 rtx x = *loc;
594 enum rtx_code code;
595 const char *fmt;
596 int i, j;
597
598 if (! x)
599 return;
600
601 code = GET_CODE (x);
602 if (code == REG)
603 {
604 unsigned int regno = REGNO (x);
605
606 if (regno < FIRST_PSEUDO_REGISTER)
607 return;
608
609 x = eliminate_regs (x, mem_mode, usage);
610 if (x != *loc)
611 {
612 *loc = x;
613 replace_pseudos_in_call_usage (loc, mem_mode, usage);
614 return;
615 }
616
617 if (reg_equiv_constant[regno])
618 *loc = reg_equiv_constant[regno];
619 else if (reg_equiv_mem[regno])
620 *loc = reg_equiv_mem[regno];
621 else if (reg_equiv_address[regno])
622 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
623 else if (GET_CODE (regno_reg_rtx[regno]) != REG
624 || REGNO (regno_reg_rtx[regno]) != regno)
625 *loc = regno_reg_rtx[regno];
626 else
627 abort ();
628
629 return;
630 }
631 else if (code == MEM)
632 {
633 replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
634 return;
635 }
636
637 /* Process each of our operands recursively. */
638 fmt = GET_RTX_FORMAT (code);
639 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
640 if (*fmt == 'e')
641 replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
642 else if (*fmt == 'E')
643 for (j = 0; j < XVECLEN (x, i); j++)
644 replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
645}
646
647
648/* Global variables used by reload and its subroutines. */
649
650/* Set during calculate_needs if an insn needs register elimination. */
651static int something_needs_elimination;
652/* Set during calculate_needs if an insn needs an operand changed. */
653int something_needs_operands_changed;
654
655/* Nonzero means we couldn't get enough spill regs. */
656static int failure;
657
658/* Main entry point for the reload pass.
659
660 FIRST is the first insn of the function being compiled.
661
662 GLOBAL nonzero means we were called from global_alloc
663 and should attempt to reallocate any pseudoregs that we
664 displace from hard regs we will use for reloads.
665 If GLOBAL is zero, we do not have enough information to do that,
666 so any pseudo reg that is spilled must go to the stack.
667
668 Return value is nonzero if reload failed
669 and we must not do any more for this function. */
670
671int
672reload (first, global)
673 rtx first;
674 int global;
675{
676 int i;
677 rtx insn;
678 struct elim_table *ep;
679
680 /* The two pointers used to track the true location of the memory used
681 for label offsets. */
682 char *real_known_ptr = NULL;
683 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
684
685 /* Make sure even insns with volatile mem refs are recognizable. */
686 init_recog ();
687
688 failure = 0;
689
690 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
691
692 /* Make sure that the last insn in the chain
693 is not something that needs reloading. */
694 emit_note (NULL, NOTE_INSN_DELETED);
695
696 /* Enable find_equiv_reg to distinguish insns made by reload. */
697 reload_first_uid = get_max_uid ();
698
699#ifdef SECONDARY_MEMORY_NEEDED
700 /* Initialize the secondary memory table. */
701 clear_secondary_mem ();
702#endif
703
704 /* We don't have a stack slot for any spill reg yet. */
705 memset ((char *) spill_stack_slot, 0, sizeof spill_stack_slot);
706 memset ((char *) spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
707
708 /* Initialize the save area information for caller-save, in case some
709 are needed. */
710 init_save_areas ();
711
712 /* Compute which hard registers are now in use
713 as homes for pseudo registers.
714 This is done here rather than (eg) in global_alloc
715 because this point is reached even if not optimizing. */
716 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
717 mark_home_live (i);
718
719 /* A function that receives a nonlocal goto must save all call-saved
720 registers. */
721 if (current_function_has_nonlocal_label)
722 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
723 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
724 regs_ever_live[i] = 1;
725
726 /* Find all the pseudo registers that didn't get hard regs
727 but do have known equivalent constants or memory slots.
728 These include parameters (known equivalent to parameter slots)
729 and cse'd or loop-moved constant memory addresses.
730
731 Record constant equivalents in reg_equiv_constant
732 so they will be substituted by find_reloads.
733 Record memory equivalents in reg_mem_equiv so they can
734 be substituted eventually by altering the REG-rtx's. */
735
736 reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
737 reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
738 reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
739 reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
740 reg_max_ref_width = (unsigned int *) xcalloc (max_regno, sizeof (int));
741 reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
742 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
743 pseudo_forbidden_regs
744 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
745 pseudo_previous_regs
746 = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));
747
748 CLEAR_HARD_REG_SET (bad_spill_regs_global);
749
750 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
751 Also find all paradoxical subregs and find largest such for each pseudo.
752 On machines with small register classes, record hard registers that
753 are used for user variables. These can never be used for spills.
754 Also look for a "constant" REG_SETJMP. This means that all
755 caller-saved registers must be marked live. */
756
757 num_eliminable_invariants = 0;
758 for (insn = first; insn; insn = NEXT_INSN (insn))
759 {
760 rtx set = single_set (insn);
761
762 /* We may introduce USEs that we want to remove at the end, so
763 we'll mark them with QImode. Make sure there are no
764 previously-marked insns left by say regmove. */
765 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
766 && GET_MODE (insn) != VOIDmode)
767 PUT_MODE (insn, VOIDmode);
768
769 if (GET_CODE (insn) == CALL_INSN
770 && find_reg_note (insn, REG_SETJMP, NULL))
771 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
772 if (! call_used_regs[i])
773 regs_ever_live[i] = 1;
774
775 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
776 {
777 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
778 if (note
779#ifdef LEGITIMATE_PIC_OPERAND_P
780 && (! function_invariant_p (XEXP (note, 0))
781 || ! flag_pic
782 /* A function invariant is often CONSTANT_P but may
783 include a register. We promise to only pass
784 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
785 || (CONSTANT_P (XEXP (note, 0))
786 && LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0))))
787#endif
788 )
789 {
790 rtx x = XEXP (note, 0);
791 i = REGNO (SET_DEST (set));
792 if (i > LAST_VIRTUAL_REGISTER)
793 {
794 /* It can happen that a REG_EQUIV note contains a MEM
795 that is not a legitimate memory operand. As later
796 stages of reload assume that all addresses found
797 in the reg_equiv_* arrays were originally legitimate,
798 we ignore such REG_EQUIV notes. */
799 if (memory_operand (x, VOIDmode))
800 {
801 /* Always unshare the equivalence, so we can
802 substitute into this insn without touching the
803 equivalence. */
804 reg_equiv_memory_loc[i] = copy_rtx (x);
805 }
806 else if (function_invariant_p (x))
807 {
808 if (GET_CODE (x) == PLUS)
809 {
810 /* This is PLUS of frame pointer and a constant,
811 and might be shared. Unshare it. */
812 reg_equiv_constant[i] = copy_rtx (x);
813 num_eliminable_invariants++;
814 }
815 else if (x == frame_pointer_rtx
816 || x == arg_pointer_rtx)
817 {
818 reg_equiv_constant[i] = x;
819 num_eliminable_invariants++;
820 }
821 else if (LEGITIMATE_CONSTANT_P (x))
822 reg_equiv_constant[i] = x;
823 else
824 reg_equiv_memory_loc[i]
825 = force_const_mem (GET_MODE (SET_DEST (set)), x);
826 }
827 else
828 continue;
829
830 /* If this register is being made equivalent to a MEM
831 and the MEM is not SET_SRC, the equivalencing insn
832 is one with the MEM as a SET_DEST and it occurs later.
833 So don't mark this insn now. */
834 if (GET_CODE (x) != MEM
835 || rtx_equal_p (SET_SRC (set), x))
836 reg_equiv_init[i]
837 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
838 }
839 }
840 }
841
842 /* If this insn is setting a MEM from a register equivalent to it,
843 this is the equivalencing insn. */
844 else if (set && GET_CODE (SET_DEST (set)) == MEM
845 && GET_CODE (SET_SRC (set)) == REG
846 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
847 && rtx_equal_p (SET_DEST (set),
848 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
849 reg_equiv_init[REGNO (SET_SRC (set))]
850 = gen_rtx_INSN_LIST (VOIDmode, insn,
851 reg_equiv_init[REGNO (SET_SRC (set))]);
852
853 if (INSN_P (insn))
854 scan_paradoxical_subregs (PATTERN (insn));
855 }
856
857 init_elim_table ();
858
859 num_labels = max_label_num () - get_first_label_num ();
860
861 /* Allocate the tables used to store offset information at labels. */
862 /* We used to use alloca here, but the size of what it would try to
863 allocate would occasionally cause it to exceed the stack limit and
864 cause a core dump. */
865 real_known_ptr = xmalloc (num_labels);
866 real_at_ptr
867 = (int (*)[NUM_ELIMINABLE_REGS])
868 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
869
870 offsets_known_at = real_known_ptr - get_first_label_num ();
871 offsets_at
872 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
873
874 /* Alter each pseudo-reg rtx to contain its hard reg number.
875 Assign stack slots to the pseudos that lack hard regs or equivalents.
876 Do not touch virtual registers. */
877
878 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
879 alter_reg (i, -1);
880
881 /* If we have some registers we think can be eliminated, scan all insns to
882 see if there is an insn that sets one of these registers to something
883 other than itself plus a constant. If so, the register cannot be
884 eliminated. Doing this scan here eliminates an extra pass through the
885 main reload loop in the most common case where register elimination
886 cannot be done. */
887 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
888 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
889 || GET_CODE (insn) == CALL_INSN)
890 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
891
892 maybe_fix_stack_asms ();
893
894 insns_need_reload = 0;
895 something_needs_elimination = 0;
896
897 /* Initialize to -1, which means take the first spill register. */
898 last_spill_reg = -1;
899
900 /* Spill any hard regs that we know we can't eliminate. */
901 CLEAR_HARD_REG_SET (used_spill_regs);
902 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
903 if (! ep->can_eliminate)
904 spill_hard_reg (ep->from, 1);
905
906#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
907 if (frame_pointer_needed)
908 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
909#endif
910 finish_spills (global);
911
912 /* From now on, we may need to generate moves differently. We may also
913 allow modifications of insns which cause them to not be recognized.
914 Any such modifications will be cleaned up during reload itself. */
915 reload_in_progress = 1;
916
917 /* This loop scans the entire function each go-round
918 and repeats until one repetition spills no additional hard regs. */
919 for (;;)
920 {
921 int something_changed;
922 int did_spill;
923
924 HOST_WIDE_INT starting_frame_size;
925
926 /* Round size of stack frame to stack_alignment_needed. This must be done
927 here because the stack size may be a part of the offset computation
928 for register elimination, and there might have been new stack slots
929 created in the last iteration of this loop. */
930 if (cfun->stack_alignment_needed)
931 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
932
933 starting_frame_size = get_frame_size ();
934
935 set_initial_elim_offsets ();
936 set_initial_label_offsets ();
937
938 /* For each pseudo register that has an equivalent location defined,
939 try to eliminate any eliminable registers (such as the frame pointer)
940 assuming initial offsets for the replacement register, which
941 is the normal case.
942
943 If the resulting location is directly addressable, substitute
944 the MEM we just got directly for the old REG.
945
946 If it is not addressable but is a constant or the sum of a hard reg
947 and constant, it is probably not addressable because the constant is
948 out of range, in that case record the address; we will generate
949 hairy code to compute the address in a register each time it is
950 needed. Similarly if it is a hard register, but one that is not
951 valid as an address register.
952
953 If the location is not addressable, but does not have one of the
954 above forms, assign a stack slot. We have to do this to avoid the
955 potential of producing lots of reloads if, e.g., a location involves
956 a pseudo that didn't get a hard register and has an equivalent memory
957 location that also involves a pseudo that didn't get a hard register.
958
959 Perhaps at some point we will improve reload_when_needed handling
960 so this problem goes away. But that's very hairy. */
961
962 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
963 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
964 {
965 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
966
967 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
968 XEXP (x, 0)))
969 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
970 else if (CONSTANT_P (XEXP (x, 0))
971 || (GET_CODE (XEXP (x, 0)) == REG
972 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
973 || (GET_CODE (XEXP (x, 0)) == PLUS
974 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
975 && (REGNO (XEXP (XEXP (x, 0), 0))
976 < FIRST_PSEUDO_REGISTER)
977 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
978 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
979 else
980 {
981 /* Make a new stack slot. Then indicate that something
982 changed so we go back and recompute offsets for
983 eliminable registers because the allocation of memory
984 below might change some offset. reg_equiv_{mem,address}
985 will be set up for this pseudo on the next pass around
986 the loop. */
987 reg_equiv_memory_loc[i] = 0;
988 reg_equiv_init[i] = 0;
989 alter_reg (i, -1);
990 }
991 }
992
993 if (caller_save_needed)
994 setup_save_areas ();
995
996 /* If we allocated another stack slot, redo elimination bookkeeping. */
997 if (starting_frame_size != get_frame_size ())
998 continue;
999
1000 if (caller_save_needed)
1001 {
1002 save_call_clobbered_regs ();
1003 /* That might have allocated new insn_chain structures. */
1004 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1005 }
1006
1007 calculate_needs_all_insns (global);
1008
1009 CLEAR_REG_SET (&spilled_pseudos);
1010 did_spill = 0;
1011
1012 something_changed = 0;
1013
1014 /* If we allocated any new memory locations, make another pass
1015 since it might have changed elimination offsets. */
1016 if (starting_frame_size != get_frame_size ())
1017 something_changed = 1;
1018
1019 {
1020 HARD_REG_SET to_spill;
1021 CLEAR_HARD_REG_SET (to_spill);
1022 update_eliminables (&to_spill);
1023 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1024 if (TEST_HARD_REG_BIT (to_spill, i))
1025 {
1026 spill_hard_reg (i, 1);
1027 did_spill = 1;
1028
1029 /* Regardless of the state of spills, if we previously had
1030 a register that we thought we could eliminate, but now can
1031 not eliminate, we must run another pass.
1032
1033 Consider pseudos which have an entry in reg_equiv_* which
1034 reference an eliminable register. We must make another pass
1035 to update reg_equiv_* so that we do not substitute in the
1036 old value from when we thought the elimination could be
1037 performed. */
1038 something_changed = 1;
1039 }
1040 }
1041
1042 select_reload_regs ();
1043 if (failure)
1044 goto failed;
1045
1046 if (insns_need_reload != 0 || did_spill)
1047 something_changed |= finish_spills (global);
1048
1049 if (! something_changed)
1050 break;
1051
1052 if (caller_save_needed)
1053 delete_caller_save_insns ();
1054
1055 obstack_free (&reload_obstack, reload_firstobj);
1056 }
1057
1058 /* If global-alloc was run, notify it of any register eliminations we have
1059 done. */
1060 if (global)
1061 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1062 if (ep->can_eliminate)
1063 mark_elimination (ep->from, ep->to);
1064
1065 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1066 If that insn didn't set the register (i.e., it copied the register to
1067 memory), just delete that insn instead of the equivalencing insn plus
1068 anything now dead. If we call delete_dead_insn on that insn, we may
1069 delete the insn that actually sets the register if the register dies
1070 there and that is incorrect. */
1071
1072 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1073 {
1074 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1075 {
1076 rtx list;
1077 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1078 {
1079 rtx equiv_insn = XEXP (list, 0);
1080
1081 /* If we already deleted the insn or if it may trap, we can't
1082 delete it. The latter case shouldn't happen, but can
1083 if an insn has a variable address, gets a REG_EH_REGION
1084 note added to it, and then gets converted into a load
1085 from a constant address. */
1086 if (GET_CODE (equiv_insn) == NOTE
1087 || can_throw_internal (equiv_insn))
1088 ;
1089 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1090 delete_dead_insn (equiv_insn);
1091 else
1092 {
1093 PUT_CODE (equiv_insn, NOTE);
1094 NOTE_SOURCE_FILE (equiv_insn) = 0;
1095 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1096 }
1097 }
1098 }
1099 }
1100
1101 /* Use the reload registers where necessary
1102 by generating move instructions to move the must-be-register
1103 values into or out of the reload registers. */
1104
1105 if (insns_need_reload != 0 || something_needs_elimination
1106 || something_needs_operands_changed)
1107 {
1108 HOST_WIDE_INT old_frame_size = get_frame_size ();
1109
1110 reload_as_needed (global);
1111
1112 if (old_frame_size != get_frame_size ())
1113 abort ();
1114
1115 if (num_eliminable)
1116 verify_initial_elim_offsets ();
1117 }
1118
1119 /* If we were able to eliminate the frame pointer, show that it is no
1120 longer live at the start of any basic block. If it is live by
1121 virtue of being in a pseudo, that pseudo will be marked live
1122 and hence the frame pointer will be known to be live via that
1123 pseudo. */
1124
1125 if (! frame_pointer_needed)
1126 for (i = 0; i < n_basic_blocks; i++)
1127 CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start,
1128 HARD_FRAME_POINTER_REGNUM);
1129
1130 /* Come here (with failure set nonzero) if we can't get enough spill regs
1131 and we decide not to abort about it. */
1132 failed:
1133
1134 CLEAR_REG_SET (&spilled_pseudos);
1135 reload_in_progress = 0;
1136
1137 /* Now eliminate all pseudo regs by modifying them into
1138 their equivalent memory references.
1139 The REG-rtx's for the pseudos are modified in place,
1140 so all insns that used to refer to them now refer to memory.
1141
1142 For a reg that has a reg_equiv_address, all those insns
1143 were changed by reloading so that no insns refer to it any longer;
1144 but the DECL_RTL of a variable decl may refer to it,
1145 and if so this causes the debugging info to mention the variable. */
1146
1147 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1148 {
1149 rtx addr = 0;
1150
1151 if (reg_equiv_mem[i])
1152 addr = XEXP (reg_equiv_mem[i], 0);
1153
1154 if (reg_equiv_address[i])
1155 addr = reg_equiv_address[i];
1156
1157 if (addr)
1158 {
1159 if (reg_renumber[i] < 0)
1160 {
1161 rtx reg = regno_reg_rtx[i];
1162
1163 PUT_CODE (reg, MEM);
1164 XEXP (reg, 0) = addr;
1165 REG_USERVAR_P (reg) = 0;
1166 if (reg_equiv_memory_loc[i])
1167 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1168 else
1169 {
1170 RTX_UNCHANGING_P (reg) = MEM_IN_STRUCT_P (reg)
1171 = MEM_SCALAR_P (reg) = 0;
1172 MEM_ATTRS (reg) = 0;
1173 }
1174 }
1175 else if (reg_equiv_mem[i])
1176 XEXP (reg_equiv_mem[i], 0) = addr;
1177 }
1178 }
1179
1180 /* We must set reload_completed now since the cleanup_subreg_operands call
1181 below will re-recognize each insn and reload may have generated insns
1182 which are only valid during and after reload. */
1183 reload_completed = 1;
1184
1185 /* Make a pass over all the insns and delete all USEs which we inserted
1186 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1187 notes. Delete all CLOBBER insns that don't refer to the return value
1188 and simplify (subreg (reg)) operands. Also remove all REG_RETVAL and
1189 REG_LIBCALL notes since they are no longer useful or accurate. Strip
1190 and regenerate REG_INC notes that may have been moved around. */
1191
1192 for (insn = first; insn; insn = NEXT_INSN (insn))
1193 if (INSN_P (insn))
1194 {
1195 rtx *pnote;
1196
1197 if (GET_CODE (insn) == CALL_INSN)
1198 replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
1199 VOIDmode,
1200 CALL_INSN_FUNCTION_USAGE (insn));
1201
1202 if ((GET_CODE (PATTERN (insn)) == USE
1203 /* We mark with QImode USEs introduced by reload itself. */
1204 && (GET_MODE (insn) == QImode
1205 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1206 || (GET_CODE (PATTERN (insn)) == CLOBBER
1207 && (GET_CODE (XEXP (PATTERN (insn), 0)) != REG
1208 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1209 {
1210 delete_insn (insn);
1211 continue;
1212 }
1213
1214 pnote = &REG_NOTES (insn);
1215 while (*pnote != 0)
1216 {
1217 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1218 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1219 || REG_NOTE_KIND (*pnote) == REG_INC
1220 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1221 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1222 *pnote = XEXP (*pnote, 1);
1223 else
1224 pnote = &XEXP (*pnote, 1);
1225 }
1226
1227#ifdef AUTO_INC_DEC
1228 add_auto_inc_notes (insn, PATTERN (insn));
1229#endif
1230
1231 /* And simplify (subreg (reg)) if it appears as an operand. */
1232 cleanup_subreg_operands (insn);
1233 }
1234
1235 /* If we are doing stack checking, give a warning if this function's
1236 frame size is larger than we expect. */
1237 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1238 {
1239 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1240 static int verbose_warned = 0;
1241
1242 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1243 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1244 size += UNITS_PER_WORD;
1245
1246 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1247 {
1248 warning ("frame size too large for reliable stack checking");
1249 if (! verbose_warned)
1250 {
1251 warning ("try reducing the number of local variables");
1252 verbose_warned = 1;
1253 }
1254 }
1255 }
1256
1257 /* Indicate that we no longer have known memory locations or constants. */
1258 if (reg_equiv_constant)
1259 free (reg_equiv_constant);
1260 reg_equiv_constant = 0;
1261 if (reg_equiv_memory_loc)
1262 free (reg_equiv_memory_loc);
1263 reg_equiv_memory_loc = 0;
1264
1265 if (real_known_ptr)
1266 free (real_known_ptr);
1267 if (real_at_ptr)
1268 free (real_at_ptr);
1269
1270 free (reg_equiv_mem);
1271 free (reg_equiv_init);
1272 free (reg_equiv_address);
1273 free (reg_max_ref_width);
1274 free (reg_old_renumber);
1275 free (pseudo_previous_regs);
1276 free (pseudo_forbidden_regs);
1277
1278 CLEAR_HARD_REG_SET (used_spill_regs);
1279 for (i = 0; i < n_spills; i++)
1280 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1281
1282 /* Free all the insn_chain structures at once. */
1283 obstack_free (&reload_obstack, reload_startobj);
1284 unused_insn_chains = 0;
1285 fixup_abnormal_edges ();
1286
1287 /* Replacing pseudos with their memory equivalents might have
1288 created shared rtx. Subsequent passes would get confused
1289 by this, so unshare everything here. */
1290 unshare_all_rtl_again (first);
1291
1292 return failure;
1293}
1294
1295/* Yet another special case. Unfortunately, reg-stack forces people to
1296 write incorrect clobbers in asm statements. These clobbers must not
1297 cause the register to appear in bad_spill_regs, otherwise we'll call
1298 fatal_insn later. We clear the corresponding regnos in the live
1299 register sets to avoid this.
1300 The whole thing is rather sick, I'm afraid. */
1301
1302static void
1303maybe_fix_stack_asms ()
1304{
1305#ifdef STACK_REGS
1306 const char *constraints[MAX_RECOG_OPERANDS];
1307 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1308 struct insn_chain *chain;
1309
1310 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1311 {
1312 int i, noperands;
1313 HARD_REG_SET clobbered, allowed;
1314 rtx pat;
1315
1316 if (! INSN_P (chain->insn)
1317 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1318 continue;
1319 pat = PATTERN (chain->insn);
1320 if (GET_CODE (pat) != PARALLEL)
1321 continue;
1322
1323 CLEAR_HARD_REG_SET (clobbered);
1324 CLEAR_HARD_REG_SET (allowed);
1325
1326 /* First, make a mask of all stack regs that are clobbered. */
1327 for (i = 0; i < XVECLEN (pat, 0); i++)
1328 {
1329 rtx t = XVECEXP (pat, 0, i);
1330 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1331 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1332 }
1333
1334 /* Get the operand values and constraints out of the insn. */
1335 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1336 constraints, operand_mode);
1337
1338 /* For every operand, see what registers are allowed. */
1339 for (i = 0; i < noperands; i++)
1340 {
1341 const char *p = constraints[i];
1342 /* For every alternative, we compute the class of registers allowed
1343 for reloading in CLS, and merge its contents into the reg set
1344 ALLOWED. */
1345 int cls = (int) NO_REGS;
1346
1347 for (;;)
1348 {
1349 char c = *p++;
1350
1351 if (c == '\0' || c == ',' || c == '#')
1352 {
1353 /* End of one alternative - mark the regs in the current
1354 class, and reset the class. */
1355 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1356 cls = NO_REGS;
1357 if (c == '#')
1358 do {
1359 c = *p++;
1360 } while (c != '\0' && c != ',');
1361 if (c == '\0')
1362 break;
1363 continue;
1364 }
1365
1366 switch (c)
1367 {
1368 case '=': case '+': case '*': case '%': case '?': case '!':
1369 case '0': case '1': case '2': case '3': case '4': case 'm':
1370 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1371 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1372 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1373 case 'P':
1374 break;
1375
1376 case 'p':
1377 cls = (int) reg_class_subunion[cls]
1378 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1379 break;
1380
1381 case 'g':
1382 case 'r':
1383 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1384 break;
1385
1386 default:
1387 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
1388
1389 }
1390 }
1391 }
1392 /* Those of the registers which are clobbered, but allowed by the
1393 constraints, must be usable as reload registers. So clear them
1394 out of the life information. */
1395 AND_HARD_REG_SET (allowed, clobbered);
1396 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1397 if (TEST_HARD_REG_BIT (allowed, i))
1398 {
1399 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1400 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1401 }
1402 }
1403
1404#endif
1405}
1406
1407/* Copy the global variables n_reloads and rld into the corresponding elts
1408 of CHAIN. */
1409static void
1410copy_reloads (chain)
1411 struct insn_chain *chain;
1412{
1413 chain->n_reloads = n_reloads;
1414 chain->rld
1415 = (struct reload *) obstack_alloc (&reload_obstack,
1416 n_reloads * sizeof (struct reload));
1417 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1418 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1419}
1420
1421/* Walk the chain of insns, and determine for each whether it needs reloads
1422 and/or eliminations. Build the corresponding insns_need_reload list, and
1423 set something_needs_elimination as appropriate. */
1424static void
1425calculate_needs_all_insns (global)
1426 int global;
1427{
1428 struct insn_chain **pprev_reload = &insns_need_reload;
1429 struct insn_chain *chain, *next = 0;
1430
1431 something_needs_elimination = 0;
1432
1433 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1434 for (chain = reload_insn_chain; chain != 0; chain = next)
1435 {
1436 rtx insn = chain->insn;
1437
1438 next = chain->next;
1439
1440 /* Clear out the shortcuts. */
1441 chain->n_reloads = 0;
1442 chain->need_elim = 0;
1443 chain->need_reload = 0;
1444 chain->need_operand_change = 0;
1445
1446 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1447 include REG_LABEL), we need to see what effects this has on the
1448 known offsets at labels. */
1449
1450 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1451 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1452 set_label_offsets (insn, insn, 0);
1453
1454 if (INSN_P (insn))
1455 {
1456 rtx old_body = PATTERN (insn);
1457 int old_code = INSN_CODE (insn);
1458 rtx old_notes = REG_NOTES (insn);
1459 int did_elimination = 0;
1460 int operands_changed = 0;
1461 rtx set = single_set (insn);
1462
1463 /* Skip insns that only set an equivalence. */
1464 if (set && GET_CODE (SET_DEST (set)) == REG
1465 && reg_renumber[REGNO (SET_DEST (set))] < 0
1466 && reg_equiv_constant[REGNO (SET_DEST (set))])
1467 continue;
1468
1469 /* If needed, eliminate any eliminable registers. */
1470 if (num_eliminable || num_eliminable_invariants)
1471 did_elimination = eliminate_regs_in_insn (insn, 0);
1472
1473 /* Analyze the instruction. */
1474 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1475 global, spill_reg_order);
1476
1477 /* If a no-op set needs more than one reload, this is likely
1478 to be something that needs input address reloads. We
1479 can't get rid of this cleanly later, and it is of no use
1480 anyway, so discard it now.
1481 We only do this when expensive_optimizations is enabled,
1482 since this complements reload inheritance / output
1483 reload deletion, and it can make debugging harder. */
1484 if (flag_expensive_optimizations && n_reloads > 1)
1485 {
1486 rtx set = single_set (insn);
1487 if (set
1488 && SET_SRC (set) == SET_DEST (set)
1489 && GET_CODE (SET_SRC (set)) == REG
1490 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1491 {
1492 delete_insn (insn);
1493 /* Delete it from the reload chain */
1494 if (chain->prev)
1495 chain->prev->next = next;
1496 else
1497 reload_insn_chain = next;
1498 if (next)
1499 next->prev = chain->prev;
1500 chain->next = unused_insn_chains;
1501 unused_insn_chains = chain;
1502 continue;
1503 }
1504 }
1505 if (num_eliminable)
1506 update_eliminable_offsets ();
1507
1508 /* Remember for later shortcuts which insns had any reloads or
1509 register eliminations. */
1510 chain->need_elim = did_elimination;
1511 chain->need_reload = n_reloads > 0;
1512 chain->need_operand_change = operands_changed;
1513
1514 /* Discard any register replacements done. */
1515 if (did_elimination)
1516 {
1517 obstack_free (&reload_obstack, reload_insn_firstobj);
1518 PATTERN (insn) = old_body;
1519 INSN_CODE (insn) = old_code;
1520 REG_NOTES (insn) = old_notes;
1521 something_needs_elimination = 1;
1522 }
1523
1524 something_needs_operands_changed |= operands_changed;
1525
1526 if (n_reloads != 0)
1527 {
1528 copy_reloads (chain);
1529 *pprev_reload = chain;
1530 pprev_reload = &chain->next_need_reload;
1531 }
1532 }
1533 }
1534 *pprev_reload = 0;
1535}
1536
1537/* Comparison function for qsort to decide which of two reloads
1538 should be handled first. *P1 and *P2 are the reload numbers. */
1539
1540static int
1541reload_reg_class_lower (r1p, r2p)
1542 const PTR r1p;
1543 const PTR r2p;
1544{
1545 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1546 int t;
1547
1548 /* Consider required reloads before optional ones. */
1549 t = rld[r1].optional - rld[r2].optional;
1550 if (t != 0)
1551 return t;
1552
1553 /* Count all solitary classes before non-solitary ones. */
1554 t = ((reg_class_size[(int) rld[r2].class] == 1)
1555 - (reg_class_size[(int) rld[r1].class] == 1));
1556 if (t != 0)
1557 return t;
1558
1559 /* Aside from solitaires, consider all multi-reg groups first. */
1560 t = rld[r2].nregs - rld[r1].nregs;
1561 if (t != 0)
1562 return t;
1563
1564 /* Consider reloads in order of increasing reg-class number. */
1565 t = (int) rld[r1].class - (int) rld[r2].class;
1566 if (t != 0)
1567 return t;
1568
1569 /* If reloads are equally urgent, sort by reload number,
1570 so that the results of qsort leave nothing to chance. */
1571 return r1 - r2;
1572}
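
/* A rough sketch of how reload_reg_class_lower is used (the ordering
   is set up when the reload registers for one insn are chosen, in
   find_reload_regs ()):

     for (i = 0; i < chain->n_reloads; i++)
       reload_order[i] = i;
     qsort (reload_order, chain->n_reloads, sizeof (short),
            reload_reg_class_lower);

   so that required, solitary-class and multi-reg reloads get first pick
   of the available spill registers.  */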
1573
1574/* The cost of spilling each hard reg. */
1575static int spill_cost[FIRST_PSEUDO_REGISTER];
1576
1577/* When spilling multiple hard registers, we use SPILL_COST for the first
1578 spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1579 is charged only against the first hard reg of a multi-reg pseudo.  */
1580static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1581
1582/* Update the spill cost arrays, considering that pseudo REG is live. */
1583
1584static void
1585count_pseudo (reg)
1586 int reg;
1587{
1588 int freq = REG_FREQ (reg);
1589 int r = reg_renumber[reg];
1590 int nregs;
1591
1592 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1593 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1594 return;
1595
1596 SET_REGNO_REG_SET (&pseudos_counted, reg);
1597
1598 if (r < 0)
1599 abort ();
1600
1601 spill_add_cost[r] += freq;
1602
1603 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1604 while (nregs-- > 0)
1605 spill_cost[r + nregs] += freq;
1606}
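/* (Worked example, purely illustrative: if pseudo 100 has frequency 5 and
   reg_renumber assigned it to hard regs 3 and 4, the code above does in
   effect
	spill_add_cost[3] += 5;
	spill_cost[3] += 5;  spill_cost[4] += 5;
   so taking reg 3 for a reload is charged 5, while extending a group that
   already spills reg 3 to also cover reg 4 adds only spill_add_cost[4].)  */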
1607
1608/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1609 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1610
1611static void
1612order_regs_for_reload (chain)
1613 struct insn_chain *chain;
1614{
1615 int i;
1616 HARD_REG_SET used_by_pseudos;
1617 HARD_REG_SET used_by_pseudos2;
1618
1619 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1620
1621 memset (spill_cost, 0, sizeof spill_cost);
1622 memset (spill_add_cost, 0, sizeof spill_add_cost);
1623
1624 /* Count number of uses of each hard reg by pseudo regs allocated to it
1625 and then order them by decreasing use. First exclude hard registers
1626 that are live in or across this insn. */
1627
1628 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1629 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1630 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1631 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1632
1633 /* Now count the pseudos that are live in or across this insn, updating
1634 spill_cost and spill_add_cost for the hard regs they occupy.  */
1635 CLEAR_REG_SET (&pseudos_counted);
1636
1637 EXECUTE_IF_SET_IN_REG_SET
1638 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
1639 {
1640 count_pseudo (i);
1641 });
1642 EXECUTE_IF_SET_IN_REG_SET
1643 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
1644 {
1645 count_pseudo (i);
1646 });
1647 CLEAR_REG_SET (&pseudos_counted);
1648}
1649
1650/* Vector of reload-numbers showing the order in which the reloads should
1651 be processed. */
1652static short reload_order[MAX_RELOADS];
1653
1654/* This is used to keep track of the spill regs used in one insn. */
1655static HARD_REG_SET used_spill_regs_local;
1656
1657/* We decided to spill hard register SPILLED, which has a size of
1658 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1659 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1660 update SPILL_COST/SPILL_ADD_COST. */
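/* (Illustrative example: with SPILLED == 3 and SPILLED_NREGS == 2, any live
   pseudo whose hard regs intersect {3, 4} is recorded in spilled_pseudos and
   its frequency is subtracted back out of spill_cost and spill_add_cost.)  */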
1661
1662static void
1663count_spilled_pseudo (spilled, spilled_nregs, reg)
1664 int spilled, spilled_nregs, reg;
1665{
1666 int r = reg_renumber[reg];
1667 int nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1668
1669 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1670 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1671 return;
1672
1673 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1674
1675 spill_add_cost[r] -= REG_FREQ (reg);
1676 while (nregs-- > 0)
1677 spill_cost[r + nregs] -= REG_FREQ (reg);
1678}
1679
1680/* Find reload register to use for reload number ORDER. */
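/* (Summary sketch, for illustration: the cost of choosing hard reg R for an
   N-register reload is spill_cost[R] plus spill_add_cost[R+1] through
   spill_add_cost[R+N-1]; a register that already holds rl->in or rl->out
   gets a one-unit discount, nudging the choice toward values that need not
   be copied.)  */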
1681
1682static int
1683find_reg (chain, order)
1684 struct insn_chain *chain;
1685 int order;
1686{
1687 int rnum = reload_order[order];
1688 struct reload *rl = rld + rnum;
1689 int best_cost = INT_MAX;
1690 int best_reg = -1;
1691 unsigned int i, j;
1692 int k;
1693 HARD_REG_SET not_usable;
1694 HARD_REG_SET used_by_other_reload;
1695
1696 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1697 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1698 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1699
1700 CLEAR_HARD_REG_SET (used_by_other_reload);
1701 for (k = 0; k < order; k++)
1702 {
1703 int other = reload_order[k];
1704
1705 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1706 for (j = 0; j < rld[other].nregs; j++)
1707 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1708 }
1709
1710 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1711 {
1712 unsigned int regno = i;
1713
1714 if (! TEST_HARD_REG_BIT (not_usable, regno)
1715 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1716 && HARD_REGNO_MODE_OK (regno, rl->mode))
1717 {
1718 int this_cost = spill_cost[regno];
1719 int ok = 1;
1720 unsigned int this_nregs = HARD_REGNO_NREGS (regno, rl->mode);
1721
1722 for (j = 1; j < this_nregs; j++)
1723 {
1724 this_cost += spill_add_cost[regno + j];
1725 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1726 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1727 ok = 0;
1728 }
1729 if (! ok)
1730 continue;
1731 if (rl->in && GET_CODE (rl->in) == REG && REGNO (rl->in) == regno)
1732 this_cost--;
1733 if (rl->out && GET_CODE (rl->out) == REG && REGNO (rl->out) == regno)
1734 this_cost--;
1735 if (this_cost < best_cost
1736 /* Among registers with equal cost, prefer caller-saved ones, or
1737 use REG_ALLOC_ORDER if it is defined. */
1738 || (this_cost == best_cost
1739#ifdef REG_ALLOC_ORDER
1740 && (inv_reg_alloc_order[regno]
1741 < inv_reg_alloc_order[best_reg])
1742#else
1743 && call_used_regs[regno]
1744 && ! call_used_regs[best_reg]
1745#endif
1746 ))
1747 {
1748 best_reg = regno;
1749 best_cost = this_cost;
1750 }
1751 }
1752 }
1753 if (best_reg == -1)
1754 return 0;
1755
1756 if (rtl_dump_file)
1757 fprintf (rtl_dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1758
1759 rl->nregs = HARD_REGNO_NREGS (best_reg, rl->mode);
1760 rl->regno = best_reg;
1761
1762 EXECUTE_IF_SET_IN_REG_SET
1763 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j,
1764 {
1765 count_spilled_pseudo (best_reg, rl->nregs, j);
1766 });
1767
1768 EXECUTE_IF_SET_IN_REG_SET
1769 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j,
1770 {
1771 count_spilled_pseudo (best_reg, rl->nregs, j);
1772 });
1773
1774 for (i = 0; i < rl->nregs; i++)
1775 {
1776 if (spill_cost[best_reg + i] != 0
1777 || spill_add_cost[best_reg + i] != 0)
1778 abort ();
1779 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1780 }
1781 return 1;
1782}
1783
1784/* Find more reload regs to satisfy the remaining need of an insn, which
1785 is given by CHAIN.
1786 Do it by ascending class number, since otherwise a reg
1787 might be spilled for a big class and might fail to count
1788 for a smaller class even though it belongs to that class. */
1789
1790static void
1791find_reload_regs (chain)
1792 struct insn_chain *chain;
1793{
1794 int i;
1795
1796 /* In order to be certain of getting the registers we need,
1797 we must sort the reloads into order of increasing register class.
1798 Then our grabbing of reload registers will parallel the process
1799 that provided the reload registers. */
1800 for (i = 0; i < chain->n_reloads; i++)
1801 {
1802 /* Show whether this reload already has a hard reg. */
1803 if (chain->rld[i].reg_rtx)
1804 {
1805 int regno = REGNO (chain->rld[i].reg_rtx);
1806 chain->rld[i].regno = regno;
1807 chain->rld[i].nregs
1808 = HARD_REGNO_NREGS (regno, GET_MODE (chain->rld[i].reg_rtx));
1809 }
1810 else
1811 chain->rld[i].regno = -1;
1812 reload_order[i] = i;
1813 }
1814
1815 n_reloads = chain->n_reloads;
1816 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1817
1818 CLEAR_HARD_REG_SET (used_spill_regs_local);
1819
1820 if (rtl_dump_file)
1821 fprintf (rtl_dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1822
1823 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1824
1825 /* Compute the order of preference for hard registers to spill. */
1826
1827 order_regs_for_reload (chain);
1828
1829 for (i = 0; i < n_reloads; i++)
1830 {
1831 int r = reload_order[i];
1832
1833 /* Ignore reloads that got marked inoperative. */
1834 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1835 && ! rld[r].optional
1836 && rld[r].regno == -1)
1837 if (! find_reg (chain, i))
1838 {
1839 spill_failure (chain->insn, rld[r].class);
1840 failure = 1;
1841 return;
1842 }
1843 }
1844
1845 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1846 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1847
1848 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1849}
1850
1851static void
1852select_reload_regs ()
1853{
1854 struct insn_chain *chain;
1855
1856 /* Try to satisfy the needs for each insn. */
1857 for (chain = insns_need_reload; chain != 0;
1858 chain = chain->next_need_reload)
1859 find_reload_regs (chain);
1860}
1861
1862/* Delete all insns that were inserted by emit_caller_save_insns during
1863 this iteration. */
1864static void
1865delete_caller_save_insns ()
1866{
1867 struct insn_chain *c = reload_insn_chain;
1868
1869 while (c != 0)
1870 {
1871 while (c != 0 && c->is_caller_save_insn)
1872 {
1873 struct insn_chain *next = c->next;
1874 rtx insn = c->insn;
1875
1876 if (c == reload_insn_chain)
1877 reload_insn_chain = next;
1878 delete_insn (insn);
1879
1880 if (next)
1881 next->prev = c->prev;
1882 if (c->prev)
1883 c->prev->next = next;
1884 c->next = unused_insn_chains;
1885 unused_insn_chains = c;
1886 c = next;
1887 }
1888 if (c != 0)
1889 c = c->next;
1890 }
1891}
1892
1893/* Handle the failure to find a register to spill.
1894 INSN should be one of the insns which needed this particular spill reg. */
1895
1896static void
1897spill_failure (insn, class)
1898 rtx insn;
1899 enum reg_class class;
1900{
1901 static const char *const reg_class_names[] = REG_CLASS_NAMES;
1902 if (asm_noperands (PATTERN (insn)) >= 0)
1903 error_for_asm (insn, "can't find a register in class `%s' while reloading `asm'",
1904 reg_class_names[class]);
1905 else
1906 {
1907 error ("unable to find a register to spill in class `%s'",
1908 reg_class_names[class]);
1909 fatal_insn ("this is the insn:", insn);
1910 }
1911}
1912
1913/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1914 data that is dead in INSN.  */
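/* (Illustrative case: if the previous insn is (set (reg 117) (mem ...)),
   reg 117 is mentioned in INSN and carries a REG_DEAD note there, that load
   is deleted as well, and so on recursively.)  */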
1915
1916static void
1917delete_dead_insn (insn)
1918 rtx insn;
1919{
1920 rtx prev = prev_real_insn (insn);
1921 rtx prev_dest;
1922
1923 /* If the previous insn sets a register that dies in our insn, delete it
1924 too. */
1925 if (prev && GET_CODE (PATTERN (prev)) == SET
1926 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
1927 && reg_mentioned_p (prev_dest, PATTERN (insn))
1928 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1929 && ! side_effects_p (SET_SRC (PATTERN (prev))))
1930 delete_dead_insn (prev);
1931
1932 PUT_CODE (insn, NOTE);
1933 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1934 NOTE_SOURCE_FILE (insn) = 0;
1935}
1936
1937/* Modify the home of pseudo-reg I.
1938 The new home is present in reg_renumber[I].
1939
1940 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1941 or it may be -1, meaning there is none or it is not relevant.
1942 This is used so that all pseudos spilled from a given hard reg
1943 can share one stack slot. */
1944
1945static void
1946alter_reg (i, from_reg)
1947 int i;
1948 int from_reg;
1949{
1950 /* When outputting an inline function, this can happen
1951 for a reg that isn't actually used. */
1952 if (regno_reg_rtx[i] == 0)
1953 return;
1954
1955 /* If the reg got changed to a MEM at rtl-generation time,
1956 ignore it. */
1957 if (GET_CODE (regno_reg_rtx[i]) != REG)
1958 return;
1959
1960 /* Modify the reg-rtx to contain the new hard reg
1961 number or else to contain its pseudo reg number. */
1962 REGNO (regno_reg_rtx[i])
1963 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1964
1965 /* If we have a pseudo that is needed but has no hard reg or equivalent,
1966 allocate a stack slot for it. */
1967
1968 if (reg_renumber[i] < 0
1969 && REG_N_REFS (i) > 0
1970 && reg_equiv_constant[i] == 0
1971 && reg_equiv_memory_loc[i] == 0)
1972 {
1973 rtx x;
1974 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
1975 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
1976 int adjust = 0;
1977
1978 /* Each pseudo reg has an inherent size which comes from its own mode,
1979 and a total size which provides room for paradoxical subregs
1980 which refer to the pseudo reg in wider modes.
1981
1982 We can use a slot already allocated if it provides both
1983 enough inherent space and enough total space.
1984 Otherwise, we allocate a new slot, making sure that it has no less
1985 inherent space, and no less total space, than the previous slot.  */
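      /* (Purely illustrative: an SImode pseudo has an inherent size of 4 bytes
	 on most targets, but if it is also referenced through a paradoxical
	 DImode subreg, reg_max_ref_width makes its total size 8, so a reused
	 slot must offer at least 8 bytes.)  */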
1986 if (from_reg == -1)
1987 {
1988 /* No known place to spill from => no slot to reuse. */
1989 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
1990 inherent_size == total_size ? 0 : -1);
1991 if (BYTES_BIG_ENDIAN)
1992 /* Cancel the big-endian correction done in assign_stack_local.
1993 Get the address of the beginning of the slot.
1994 This is so we can do a big-endian correction unconditionally
1995 below. */
1996 adjust = inherent_size - total_size;
1997
1998 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
1999
2000 /* Nothing can alias this slot except this pseudo. */
2001 set_mem_alias_set (x, new_alias_set ());
2002 }
2003
2004 /* Reuse a stack slot if possible. */
2005 else if (spill_stack_slot[from_reg] != 0
2006 && spill_stack_slot_width[from_reg] >= total_size
2007 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2008 >= inherent_size))
2009 x = spill_stack_slot[from_reg];
2010
2011 /* Allocate a bigger slot. */
2012 else
2013 {
2014 /* Compute maximum size needed, both for inherent size
2015 and for total size. */
2016 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2017 rtx stack_slot;
2018
2019 if (spill_stack_slot[from_reg])
2020 {
2021 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2022 > inherent_size)
2023 mode = GET_MODE (spill_stack_slot[from_reg]);
2024 if (spill_stack_slot_width[from_reg] > total_size)
2025 total_size = spill_stack_slot_width[from_reg];
2026 }
2027
2028 /* Make a slot with that size. */
2029 x = assign_stack_local (mode, total_size,
2030 inherent_size == total_size ? 0 : -1);
2031 stack_slot = x;
2032
2033 /* All pseudos mapped to this slot can alias each other. */
2034 if (spill_stack_slot[from_reg])
2035 set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2036 else
2037 set_mem_alias_set (x, new_alias_set ());
2038
2039 if (BYTES_BIG_ENDIAN)
2040 {
2041 /* Cancel the big-endian correction done in assign_stack_local.
2042 Get the address of the beginning of the slot.
2043 This is so we can do a big-endian correction unconditionally
2044 below. */
2045 adjust = GET_MODE_SIZE (mode) - total_size;
2046 if (adjust)
2047 stack_slot
2048 = adjust_address_nv (x, mode_for_size (total_size
2049 * BITS_PER_UNIT,
2050 MODE_INT, 1),
2051 adjust);
2052 }
2053
2054 spill_stack_slot[from_reg] = stack_slot;
2055 spill_stack_slot_width[from_reg] = total_size;
2056 }
2057
2058 /* On a big endian machine, the "address" of the slot
2059 is the address of the low part that fits its inherent mode. */
2060 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2061 adjust += (total_size - inherent_size);
2062
2063 /* If we have any adjustment to make, or if the stack slot is the
2064 wrong mode, make a new stack slot. */
2065 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2066
2067 /* If we have a decl for the original register, set it for the
2068 memory. If this is a shared MEM, make a copy. */
2069 if (REGNO_DECL (i))
2070 {
2071 rtx decl = DECL_RTL_IF_SET (REGNO_DECL (i));
2072
2073 /* We can do this only for the DECL's home pseudo, not for
2074 any copies of it, since otherwise when the stack slot
2075 is reused, nonoverlapping_memrefs_p might think they
2076 cannot overlap. */
2077 if (decl && GET_CODE (decl) == REG && REGNO (decl) == (unsigned) i)
2078 {
2079 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2080 x = copy_rtx (x);
2081
2082 set_mem_expr (x, REGNO_DECL (i));
2083 }
2084 }
2085
2086 /* Save the stack slot for later. */
2087 reg_equiv_memory_loc[i] = x;
2088 }
2089}
2090
2091/* Mark the slots in regs_ever_live for the hard regs
2092 used by pseudo-reg number REGNO. */
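/* (For example: if REGNO was renumbered to hard reg 6 and its mode occupies
   two hard registers, both regs_ever_live[6] and regs_ever_live[7] are set.)  */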
2093
2094void
2095mark_home_live (regno)
2096 int regno;
2097{
2098 int i, lim;
2099
2100 i = reg_renumber[regno];
2101 if (i < 0)
2102 return;
2103 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2104 while (i < lim)
2105 regs_ever_live[i++] = 1;
2106}
2107
2108/* This function handles the tracking of elimination offsets around branches.
2109
2110 X is a piece of RTL being scanned.
2111
2112 INSN is the insn that it came from, if any.
2113
2114 INITIAL_P is non-zero if we are to set the offset to be the initial
2115 offset and zero if we are setting the offset of the label to be the
2116 current offset. */
2117
2118static void
2119set_label_offsets (x, insn, initial_p)
2120 rtx x;
2121 rtx insn;
2122 int initial_p;
2123{
2124 enum rtx_code code = GET_CODE (x);
2125 rtx tem;
2126 unsigned int i;
2127 struct elim_table *p;
2128
2129 switch (code)
2130 {
2131 case LABEL_REF:
2132 if (LABEL_REF_NONLOCAL_P (x))
2133 return;
2134
2135 x = XEXP (x, 0);
2136
2137 /* ... fall through ... */
2138
2139 case CODE_LABEL:
2140 /* If we know nothing about this label, set the desired offsets. Note
2141 that this sets the offset at a label to be the offset before a label
2142 if we don't know anything about the label. This is not correct for
2143 the label after a BARRIER, but is the best guess we can make. If
2144 we guessed wrong, we will suppress an elimination that might have
2145 been possible had we been able to guess correctly. */
2146
2147 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2148 {
2149 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2150 offsets_at[CODE_LABEL_NUMBER (x)][i]
2151 = (initial_p ? reg_eliminate[i].initial_offset
2152 : reg_eliminate[i].offset);
2153 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2154 }
2155
2156 /* Otherwise, if this is the definition of a label and it is
2157 preceded by a BARRIER, set our offsets to the known offset of
2158 that label. */
2159
2160 else if (x == insn
2161 && (tem = prev_nonnote_insn (insn)) != 0
2162 && GET_CODE (tem) == BARRIER)
2163 set_offsets_for_label (insn);
2164 else
2165 /* If neither of the above cases is true, compare each offset
2166 with those previously recorded and suppress any eliminations
2167 where the offsets disagree. */
2168
2169 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2170 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2171 != (initial_p ? reg_eliminate[i].initial_offset
2172 : reg_eliminate[i].offset))
2173 reg_eliminate[i].can_eliminate = 0;
2174
2175 return;
2176
2177 case JUMP_INSN:
2178 set_label_offsets (PATTERN (insn), insn, initial_p);
2179
2180 /* ... fall through ... */
2181
2182 case INSN:
2183 case CALL_INSN:
2184 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2185 and hence must have all eliminations at their initial offsets. */
2186 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2187 if (REG_NOTE_KIND (tem) == REG_LABEL)
2188 set_label_offsets (XEXP (tem, 0), insn, 1);
2189 return;
2190
2191 case PARALLEL:
2192 case ADDR_VEC:
2193 case ADDR_DIFF_VEC:
2194 /* Each of the labels in the parallel or address vector must be
2195 at their initial offsets. We want the first field for PARALLEL
2196 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2197
2198 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2199 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2200 insn, initial_p);
2201 return;
2202
2203 case SET:
2204 /* We only care about setting PC. If the source is not RETURN,
2205 IF_THEN_ELSE, or a label, disable any eliminations not at
2206 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2207 isn't one of those possibilities. For branches to a label,
2208 call ourselves recursively.
2209
2210 Note that this can disable elimination unnecessarily when we have
2211 a non-local goto since it will look like a non-constant jump to
2212 someplace in the current function. This isn't a significant
2213 problem since such jumps will normally be when all elimination
2214 pairs are back to their initial offsets. */
2215
2216 if (SET_DEST (x) != pc_rtx)
2217 return;
2218
2219 switch (GET_CODE (SET_SRC (x)))
2220 {
2221 case PC:
2222 case RETURN:
2223 return;
2224
2225 case LABEL_REF:
2226 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2227 return;
2228
2229 case IF_THEN_ELSE:
2230 tem = XEXP (SET_SRC (x), 1);
2231 if (GET_CODE (tem) == LABEL_REF)
2232 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2233 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2234 break;
2235
2236 tem = XEXP (SET_SRC (x), 2);
2237 if (GET_CODE (tem) == LABEL_REF)
2238 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2239 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2240 break;
2241 return;
2242
2243 default:
2244 break;
2245 }
2246
2247 /* If we reach here, all eliminations must be at their initial
2248 offset because we are doing a jump to a variable address. */
2249 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2250 if (p->offset != p->initial_offset)
2251 p->can_eliminate = 0;
2252 break;
2253
2254 default:
2255 break;
2256 }
2257}
2258
2259/* Scan X and replace any eliminable registers (such as fp) with a
2260 replacement (such as sp), plus an offset.
2261
2262 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2263 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2264 MEM, we are allowed to replace a sum of a register and the constant zero
2265 with the register, which we cannot do outside a MEM. In addition, we need
2266 to record the fact that a register is referenced outside a MEM.
2267
2268 If INSN is an insn, it is the insn containing X. If we replace a REG
2269 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2270 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2271 the REG is being modified.
2272
2273 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2274 That's used when we eliminate in expressions stored in notes.
2275 This means, do not set ref_outside_mem even if the reference
2276 is outside of MEMs.
2277
2278 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2279 replacements done assuming all offsets are at their initial values. If
2280 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2281 encounter, return the actual location so that find_reloads will do
2282 the proper thing. */
2283
2284rtx
2285eliminate_regs (x, mem_mode, insn)
2286 rtx x;
2287 enum machine_mode mem_mode;
2288 rtx insn;
2289{
2290 enum rtx_code code = GET_CODE (x);
2291 struct elim_table *ep;
2292 int regno;
2293 rtx new;
2294 int i, j;
2295 const char *fmt;
2296 int copied = 0;
2297
2298 if (! current_function_decl)
2299 return x;
2300
2301 switch (code)
2302 {
2303 case CONST_INT:
2304 case CONST_DOUBLE:
2305 case CONST_VECTOR:
2306 case CONST:
2307 case SYMBOL_REF:
2308 case CODE_LABEL:
2309 case PC:
2310 case CC0:
2311 case ASM_INPUT:
2312 case ADDR_VEC:
2313 case ADDR_DIFF_VEC:
2314 case RETURN:
2315 return x;
2316
2317 case ADDRESSOF:
2318 /* This is only for the benefit of the debugging backends, which call
2319 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2320 removed after CSE. */
2321 new = eliminate_regs (XEXP (x, 0), 0, insn);
2322 if (GET_CODE (new) == MEM)
2323 return XEXP (new, 0);
2324 return x;
2325
2326 case REG:
2327 regno = REGNO (x);
2328
2329 /* First handle the case where we encounter a bare register that
2330 is eliminable. Replace it with a PLUS. */
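      /* (Sketch: with the usual frame-pointer-to-stack-pointer elimination
	 and a previous offset of, say, 16, plus_constant below turns (reg fp)
	 into (plus (reg sp) (const_int 16)).)  */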
2331 if (regno < FIRST_PSEUDO_REGISTER)
2332 {
2333 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2334 ep++)
2335 if (ep->from_rtx == x && ep->can_eliminate)
2336 return plus_constant (ep->to_rtx, ep->previous_offset);
2337
2338 }
2339 else if (reg_renumber && reg_renumber[regno] < 0
2340 && reg_equiv_constant && reg_equiv_constant[regno]
2341 && ! CONSTANT_P (reg_equiv_constant[regno]))
2342 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2343 mem_mode, insn);
2344 return x;
2345
2346 /* You might think handling MINUS in a manner similar to PLUS is a
2347 good idea. It is not. It has been tried multiple times and every
2348 time the change has had to be reverted.
2349
2350 Other parts of reload know a PLUS is special (gen_reload for example)
2351 and require special code to handle a reloaded PLUS operand.
2352
2353 Also consider backends where the flags register is clobbered by a
2354 MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2355 lea instruction comes to mind). If we try to reload a MINUS, we
2356 may kill the flags register that was holding a useful value.
2357
2358 So, please before trying to handle MINUS, consider reload as a
2359 whole instead of this little section as well as the backend issues. */
2360 case PLUS:
2361 /* If this is the sum of an eliminable register and a constant, rework
2362 the sum. */
2363 if (GET_CODE (XEXP (x, 0)) == REG
2364 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2365 && CONSTANT_P (XEXP (x, 1)))
2366 {
2367 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2368 ep++)
2369 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2370 {
2371 /* The only time we want to replace a PLUS with a REG (this
2372 occurs when the constant operand of the PLUS is the negative
2373 of the offset) is when we are inside a MEM. We won't want
2374 to do so at other times because that would change the
2375 structure of the insn in a way that reload can't handle.
2376 We special-case the commonest situation in
2377 eliminate_regs_in_insn, so just replace a PLUS with a
2378 PLUS here, unless inside a MEM. */
2379 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2380 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2381 return ep->to_rtx;
2382 else
2383 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2384 plus_constant (XEXP (x, 1),
2385 ep->previous_offset));
2386 }
2387
2388 /* If the register is not eliminable, we are done since the other
2389 operand is a constant. */
2390 return x;
2391 }
2392
2393 /* If this is part of an address, we want to bring any constant to the
2394 outermost PLUS. We will do this by doing register replacement in
2395 our operands and seeing if a constant shows up in one of them.
2396
2397 Note that there is no risk of modifying the structure of the insn,
2398 since we only get called for its operands, thus we are either
2399 modifying the address inside a MEM, or something like an address
2400 operand of a load-address insn. */
2401
2402 {
2403 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2404 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2405
2406 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2407 {
2408 /* If one side is a PLUS and the other side is a pseudo that
2409 didn't get a hard register but has a reg_equiv_constant,
2410 we must replace the constant here since it may no longer
2411 be in the position of any operand. */
2412 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2413 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2414 && reg_renumber[REGNO (new1)] < 0
2415 && reg_equiv_constant != 0
2416 && reg_equiv_constant[REGNO (new1)] != 0)
2417 new1 = reg_equiv_constant[REGNO (new1)];
2418 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2419 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2420 && reg_renumber[REGNO (new0)] < 0
2421 && reg_equiv_constant[REGNO (new0)] != 0)
2422 new0 = reg_equiv_constant[REGNO (new0)];
2423
2424 new = form_sum (new0, new1);
2425
2426 /* As above, if we are not inside a MEM we do not want to
2427 turn a PLUS into something else. We might try to do so here
2428 for an addition of 0 if we aren't optimizing. */
2429 if (! mem_mode && GET_CODE (new) != PLUS)
2430 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2431 else
2432 return new;
2433 }
2434 }
2435 return x;
2436
2437 case MULT:
2438 /* If this is the product of an eliminable register and a
2439 constant, apply the distributive law and move the constant out
2440 so that we have (plus (mult ..) ..). This is needed in order
2441 to keep load-address insns valid. This case is pathological.
2442 We ignore the possibility of overflow here. */
2443 if (GET_CODE (XEXP (x, 0)) == REG
2444 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2445 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2446 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2447 ep++)
2448 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2449 {
2450 if (! mem_mode
2451 /* Refs inside notes don't count for this purpose. */
2452 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2453 || GET_CODE (insn) == INSN_LIST)))
2454 ep->ref_outside_mem = 1;
2455
2456 return
2457 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2458 ep->previous_offset * INTVAL (XEXP (x, 1)));
2459 }
2460
2461 /* ... fall through ... */
2462
2463 case CALL:
2464 case COMPARE:
2465 /* See comments before PLUS about handling MINUS. */
2466 case MINUS:
2467 case DIV: case UDIV:
2468 case MOD: case UMOD:
2469 case AND: case IOR: case XOR:
2470 case ROTATERT: case ROTATE:
2471 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2472 case NE: case EQ:
2473 case GE: case GT: case GEU: case GTU:
2474 case LE: case LT: case LEU: case LTU:
2475 {
2476 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2477 rtx new1
2478 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2479
2480 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2481 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2482 }
2483 return x;
2484
2485 case EXPR_LIST:
2486 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2487 if (XEXP (x, 0))
2488 {
2489 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2490 if (new != XEXP (x, 0))
2491 {
2492 /* If this is a REG_DEAD note, it is not valid anymore.
2493 Using the eliminated version could result in creating a
2494 REG_DEAD note for the stack or frame pointer. */
2495 if (GET_MODE (x) == REG_DEAD)
2496 return (XEXP (x, 1)
2497 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2498 : NULL_RTX);
2499
2500 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2501 }
2502 }
2503
2504 /* ... fall through ... */
2505
2506 case INSN_LIST:
2507 /* Now do eliminations in the rest of the chain. If this was
2508 an EXPR_LIST, this might result in allocating more memory than is
2509 strictly needed, but it simplifies the code. */
2510 if (XEXP (x, 1))
2511 {
2512 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2513 if (new != XEXP (x, 1))
2514 return
2515 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2516 }
2517 return x;
2518
2519 case PRE_INC:
2520 case POST_INC:
2521 case PRE_DEC:
2522 case POST_DEC:
2523 case STRICT_LOW_PART:
2524 case NEG: case NOT:
2525 case SIGN_EXTEND: case ZERO_EXTEND:
2526 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2527 case FLOAT: case FIX:
2528 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2529 case ABS:
2530 case SQRT:
2531 case FFS:
2532 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2533 if (new != XEXP (x, 0))
2534 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2535 return x;
2536
2537 case SUBREG:
2538 /* Similar to above processing, but preserve SUBREG_BYTE.
2539 Convert (subreg (mem)) to (mem) if not paradoxical.
2540 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2541 pseudo didn't get a hard reg, we must replace this with the
2542 eliminated version of the memory location because push_reloads
2543 may do the replacement in certain circumstances. */
2544 if (GET_CODE (SUBREG_REG (x)) == REG
2545 && (GET_MODE_SIZE (GET_MODE (x))
2546 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2547 && reg_equiv_memory_loc != 0
2548 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2549 {
2550 new = SUBREG_REG (x);
2551 }
2552 else
2553 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2554
2555 if (new != SUBREG_REG (x))
2556 {
2557 int x_size = GET_MODE_SIZE (GET_MODE (x));
2558 int new_size = GET_MODE_SIZE (GET_MODE (new));
2559
2560 if (GET_CODE (new) == MEM
2561 && ((x_size < new_size
2562#ifdef WORD_REGISTER_OPERATIONS
2563 /* On these machines, combine can create rtl of the form
2564 (set (subreg:m1 (reg:m2 R) 0) ...)
2565 where m1 < m2, and expects something interesting to
2566 happen to the entire word. Moreover, it will use the
2567 (reg:m2 R) later, expecting all bits to be preserved.
2568 So if the number of words is the same, preserve the
2569 subreg so that push_reloads can see it. */
2570 && ! ((x_size - 1) / UNITS_PER_WORD
2571 == (new_size - 1) / UNITS_PER_WORD)
2572#endif
2573 )
2574 || x_size == new_size)
2575 )
2576 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2577 else
2578 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2579 }
2580
2581 return x;
2582
2583 case MEM:
2584 /* This is only for the benefit of the debugging backends, which call
2585 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2586 removed after CSE. */
2587 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2588 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
2589
2590 /* Our only special processing is to pass the mode of the MEM to our
2591 recursive call and copy the flags. While we are here, handle this
2592 case more efficiently. */
2593 return
2594 replace_equiv_address_nv (x,
2595 eliminate_regs (XEXP (x, 0),
2596 GET_MODE (x), insn));
2597
2598 case USE:
2599 /* Handle an insn_list USE that a call to a pure function may generate.  */
2600 new = eliminate_regs (XEXP (x, 0), 0, insn);
2601 if (new != XEXP (x, 0))
2602 return gen_rtx_USE (GET_MODE (x), new);
2603 return x;
2604
2605 case CLOBBER:
2606 case ASM_OPERANDS:
2607 case SET:
2608 abort ();
2609
2610 default:
2611 break;
2612 }
2613
2614 /* Process each of our operands recursively. If any have changed, make a
2615 copy of the rtx. */
2616 fmt = GET_RTX_FORMAT (code);
2617 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2618 {
2619 if (*fmt == 'e')
2620 {
2621 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
2622 if (new != XEXP (x, i) && ! copied)
2623 {
2624 rtx new_x = rtx_alloc (code);
2625 memcpy (new_x, x,
2626 (sizeof (*new_x) - sizeof (new_x->fld)
2627 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
2628 x = new_x;
2629 copied = 1;
2630 }
2631 XEXP (x, i) = new;
2632 }
2633 else if (*fmt == 'E')
2634 {
2635 int copied_vec = 0;
2636 for (j = 0; j < XVECLEN (x, i); j++)
2637 {
2638 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2639 if (new != XVECEXP (x, i, j) && ! copied_vec)
2640 {
2641 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2642 XVEC (x, i)->elem);
2643 if (! copied)
2644 {
2645 rtx new_x = rtx_alloc (code);
2646 memcpy (new_x, x,
2647 (sizeof (*new_x) - sizeof (new_x->fld)
2648 + (sizeof (new_x->fld[0])
2649 * GET_RTX_LENGTH (code))));
2650 x = new_x;
2651 copied = 1;
2652 }
2653 XVEC (x, i) = new_v;
2654 copied_vec = 1;
2655 }
2656 XVECEXP (x, i, j) = new;
2657 }
2658 }
2659 }
2660
2661 return x;
2662}
2663
2664/* Scan rtx X for modifications of elimination target registers. Update
2665 the table of eliminables to reflect the changed state. MEM_MODE is
2666 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2667
2668static void
2669elimination_effects (x, mem_mode)
2670 rtx x;
2671 enum machine_mode mem_mode;
2672
2673{
2674 enum rtx_code code = GET_CODE (x);
2675 struct elim_table *ep;
2676 int regno;
2677 int i, j;
2678 const char *fmt;
2679
2680 switch (code)
2681 {
2682 case CONST_INT:
2683 case CONST_DOUBLE:
2684 case CONST_VECTOR:
2685 case CONST:
2686 case SYMBOL_REF:
2687 case CODE_LABEL:
2688 case PC:
2689 case CC0:
2690 case ASM_INPUT:
2691 case ADDR_VEC:
2692 case ADDR_DIFF_VEC:
2693 case RETURN:
2694 return;
2695
2696 case ADDRESSOF:
2697 abort ();
2698
2699 case REG:
2700 regno = REGNO (x);
2701
2702 /* First handle the case where we encounter a bare register that
2703 is eliminable; here we only record whether the reference is outside a MEM.  */
2704 if (regno < FIRST_PSEUDO_REGISTER)
2705 {
2706 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2707 ep++)
2708 if (ep->from_rtx == x && ep->can_eliminate)
2709 {
2710 if (! mem_mode)
2711 ep->ref_outside_mem = 1;
2712 return;
2713 }
2714
2715 }
2716 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2717 && reg_equiv_constant[regno]
2718 && ! function_invariant_p (reg_equiv_constant[regno]))
2719 elimination_effects (reg_equiv_constant[regno], mem_mode);
2720 return;
2721
2722 case PRE_INC:
2723 case POST_INC:
2724 case PRE_DEC:
2725 case POST_DEC:
2726 case POST_MODIFY:
2727 case PRE_MODIFY:
2728 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2729 if (ep->to_rtx == XEXP (x, 0))
2730 {
2731 int size = GET_MODE_SIZE (mem_mode);
2732
2733 /* If more bytes than MEM_MODE are pushed, account for them. */
2734#ifdef PUSH_ROUNDING
2735 if (ep->to_rtx == stack_pointer_rtx)
2736 size = PUSH_ROUNDING (size);
2737#endif
2738 if (code == PRE_DEC || code == POST_DEC)
2739 ep->offset += size;
2740 else if (code == PRE_INC || code == POST_INC)
2741 ep->offset -= size;
2742 else if ((code == PRE_MODIFY || code == POST_MODIFY)
2743 && GET_CODE (XEXP (x, 1)) == PLUS
2744 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2745 && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2746 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2747 }
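      /* (For instance, a (pre_dec (reg sp)) addressing a 4-byte MEM makes
	 ep->offset grow by 4, or by PUSH_ROUNDING (4) on targets that round
	 push sizes.)  */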
2748
2749 /* These two aren't unary operators. */
2750 if (code == POST_MODIFY || code == PRE_MODIFY)
2751 break;
2752
2753 /* Fall through to generic unary operation case. */
2754 case STRICT_LOW_PART:
2755 case NEG: case NOT:
2756 case SIGN_EXTEND: case ZERO_EXTEND:
2757 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2758 case FLOAT: case FIX:
2759 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2760 case ABS:
2761 case SQRT:
2762 case FFS:
2763 elimination_effects (XEXP (x, 0), mem_mode);
2764 return;
2765
2766 case SUBREG:
2767 if (GET_CODE (SUBREG_REG (x)) == REG
2768 && (GET_MODE_SIZE (GET_MODE (x))
2769 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2770 && reg_equiv_memory_loc != 0
2771 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2772 return;
2773
2774 elimination_effects (SUBREG_REG (x), mem_mode);
2775 return;
2776
2777 case USE:
2778 /* If using a register that is the source of an elimination we still
2779 think can be performed, note that it cannot be performed since we don't
2780 know how this register is used. */
2781 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2782 if (ep->from_rtx == XEXP (x, 0))
2783 ep->can_eliminate = 0;
2784
2785 elimination_effects (XEXP (x, 0), mem_mode);
2786 return;
2787
2788 case CLOBBER:
2789 /* If clobbering a register that is the replacement register for an
2790 elimination we still think can be performed, note that it cannot
2791 be performed. Otherwise, we need not be concerned about it. */
2792 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2793 if (ep->to_rtx == XEXP (x, 0))
2794 ep->can_eliminate = 0;
2795
2796 elimination_effects (XEXP (x, 0), mem_mode);
2797 return;
2798
2799 case SET:
2800 /* Check for setting a register that we know about. */
2801 if (GET_CODE (SET_DEST (x)) == REG)
2802 {
2803 /* See if this is setting the replacement register for an
2804 elimination.
2805
2806 If DEST is the hard frame pointer, we do nothing because we
2807 assume that all assignments to the frame pointer are for
2808 non-local gotos and are being done at a time when they are valid
2809 and do not disturb anything else. Some machines want to
2810 eliminate a fake argument pointer (or even a fake frame pointer)
2811 with either the real frame or the stack pointer. Assignments to
2812 the hard frame pointer must not prevent this elimination. */
2813
2814 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2815 ep++)
2816 if (ep->to_rtx == SET_DEST (x)
2817 && SET_DEST (x) != hard_frame_pointer_rtx)
2818 {
2819 /* If it is being incremented, adjust the offset. Otherwise,
2820 this elimination can't be done. */
2821 rtx src = SET_SRC (x);
2822
2823 if (GET_CODE (src) == PLUS
2824 && XEXP (src, 0) == SET_DEST (x)
2825 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2826 ep->offset -= INTVAL (XEXP (src, 1));
2827 else
2828 ep->can_eliminate = 0;
2829 }
2830 }
2831
2832 elimination_effects (SET_DEST (x), 0);
2833 elimination_effects (SET_SRC (x), 0);
2834 return;
2835
2836 case MEM:
2837 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2838 abort ();
2839
2840 /* Our only special processing is to pass the mode of the MEM to our
2841 recursive call. */
2842 elimination_effects (XEXP (x, 0), GET_MODE (x));
2843 return;
2844
2845 default:
2846 break;
2847 }
2848
2849 fmt = GET_RTX_FORMAT (code);
2850 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2851 {
2852 if (*fmt == 'e')
2853 elimination_effects (XEXP (x, i), mem_mode);
2854 else if (*fmt == 'E')
2855 for (j = 0; j < XVECLEN (x, i); j++)
2856 elimination_effects (XVECEXP (x, i, j), mem_mode);
2857 }
2858}
2859
2860/* Descend through rtx X and verify that no references to eliminable registers
2861 remain. If any do remain, mark the involved register as not
2862 eliminable. */
2863
2864static void
2865check_eliminable_occurrences (x)
2866 rtx x;
2867{
2868 const char *fmt;
2869 int i;
2870 enum rtx_code code;
2871
2872 if (x == 0)
2873 return;
2874
2875 code = GET_CODE (x);
2876
2877 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2878 {
2879 struct elim_table *ep;
2880
2881 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2882 if (ep->from_rtx == x && ep->can_eliminate)
2883 ep->can_eliminate = 0;
2884 return;
2885 }
2886
2887 fmt = GET_RTX_FORMAT (code);
2888 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2889 {
2890 if (*fmt == 'e')
2891 check_eliminable_occurrences (XEXP (x, i));
2892 else if (*fmt == 'E')
2893 {
2894 int j;
2895 for (j = 0; j < XVECLEN (x, i); j++)
2896 check_eliminable_occurrences (XVECEXP (x, i, j));
2897 }
2898 }
2899}
2900
2901/* Scan INSN and eliminate all eliminable registers in it.
2902
2903 If REPLACE is nonzero, do the replacement destructively. Also
2904 delete the insn as dead if it is setting an eliminable register.
2905
2906 If REPLACE is zero, do all our allocations in reload_obstack.
2907
2908 If no eliminations were done and this insn doesn't require any elimination
2909 processing (these are not identical conditions: it might be updating sp,
2910 but not referencing fp; this needs to be seen during reload_as_needed so
2911 that the offset between fp and sp can be taken into consideration), zero
2912 is returned. Otherwise, 1 is returned. */
2913
2914static int
2915eliminate_regs_in_insn (insn, replace)
2916 rtx insn;
2917 int replace;
2918{
2919 int icode = recog_memoized (insn);
2920 rtx old_body = PATTERN (insn);
2921 int insn_is_asm = asm_noperands (old_body) >= 0;
2922 rtx old_set = single_set (insn);
2923 rtx new_body;
2924 int val = 0;
2925 int i, any_changes;
2926 rtx substed_operand[MAX_RECOG_OPERANDS];
2927 rtx orig_operand[MAX_RECOG_OPERANDS];
2928 struct elim_table *ep;
2929
2930 if (! insn_is_asm && icode < 0)
2931 {
2932 if (GET_CODE (PATTERN (insn)) == USE
2933 || GET_CODE (PATTERN (insn)) == CLOBBER
2934 || GET_CODE (PATTERN (insn)) == ADDR_VEC
2935 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2936 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
2937 return 0;
2938 abort ();
2939 }
2940
2941 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
2942 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2943 {
2944 /* Check for setting an eliminable register. */
2945 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2946 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2947 {
2948#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2949 /* If this is setting the frame pointer register to the
2950 hardware frame pointer register and this is an elimination
2951 that will be done (tested above), this insn is really
2952 adjusting the frame pointer downward to compensate for
2953 the adjustment done before a nonlocal goto. */
2954 if (ep->from == FRAME_POINTER_REGNUM
2955 && ep->to == HARD_FRAME_POINTER_REGNUM)
2956 {
2957 rtx base = SET_SRC (old_set);
2958 rtx base_insn = insn;
2959 int offset = 0;
2960
2961 while (base != ep->to_rtx)
2962 {
2963 rtx prev_insn, prev_set;
2964
2965 if (GET_CODE (base) == PLUS
2966 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2967 {
2968 offset += INTVAL (XEXP (base, 1));
2969 base = XEXP (base, 0);
2970 }
2971 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2972 && (prev_set = single_set (prev_insn)) != 0
2973 && rtx_equal_p (SET_DEST (prev_set), base))
2974 {
2975 base = SET_SRC (prev_set);
2976 base_insn = prev_insn;
2977 }
2978 else
2979 break;
2980 }
2981
2982 if (base == ep->to_rtx)
2983 {
2984 rtx src
2985 = plus_constant (ep->to_rtx, offset - ep->offset);
2986
2987 new_body = old_body;
2988 if (! replace)
2989 {
2990 new_body = copy_insn (old_body);
2991 if (REG_NOTES (insn))
2992 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
2993 }
2994 PATTERN (insn) = new_body;
2995 old_set = single_set (insn);
2996
2997 /* First see if this insn remains valid when we
2998 make the change. If not, keep the INSN_CODE
2999 the same and let reload fix it up.  */
3000 validate_change (insn, &SET_SRC (old_set), src, 1);
3001 validate_change (insn, &SET_DEST (old_set),
3002 ep->to_rtx, 1);
3003 if (! apply_change_group ())
3004 {
3005 SET_SRC (old_set) = src;
3006 SET_DEST (old_set) = ep->to_rtx;
3007 }
3008
3009 val = 1;
3010 goto done;
3011 }
3012 }
3013#endif
3014
3015 /* In this case this insn isn't serving a useful purpose. We
3016 will delete it in reload_as_needed once we know that this
3017 elimination is, in fact, being done.
3018
3019 If REPLACE isn't set, we can't delete this insn, but needn't
3020 process it since it won't be used unless something changes. */
3021 if (replace)
3022 {
3023 delete_dead_insn (insn);
3024 return 1;
3025 }
3026 val = 1;
3027 goto done;
3028 }
3029 }
3030
3031 /* We allow one special case which happens to work on all machines we
3032 currently support: a single set with the source being a PLUS of an
3033 eliminable register and a constant. */
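  /* (Illustrative: given (set (reg 100) (plus (reg fp) (const_int 8))) while
     fp is being eliminated to sp with ep->offset == -8, the offsets cancel
     and the insn is rewritten as (set (reg 100) (reg sp)); for any other
     offset the source simply becomes (plus (reg sp) (const_int 8 + ep->offset)).)  */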
3034 if (old_set
3035 && GET_CODE (SET_DEST (old_set)) == REG
3036 && GET_CODE (SET_SRC (old_set)) == PLUS
3037 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3038 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
3039 && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
3040 {
3041 rtx reg = XEXP (SET_SRC (old_set), 0);
3042 int offset = INTVAL (XEXP (SET_SRC (old_set), 1));
3043
3044 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3045 if (ep->from_rtx == reg && ep->can_eliminate)
3046 {
3047 offset += ep->offset;
3048
3049 if (offset == 0)
3050 {
3051 int num_clobbers;
3052 /* We assume here that if we need a PARALLEL with
3053 CLOBBERs for this assignment, we can make do with the
3054 MATCH_SCRATCHes that add_clobbers allocates.
3055 There's not much we can do if that doesn't work. */
3056 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3057 SET_DEST (old_set),
3058 ep->to_rtx);
3059 num_clobbers = 0;
3060 INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
3061 if (num_clobbers)
3062 {
3063 rtvec vec = rtvec_alloc (num_clobbers + 1);
3064
3065 vec->elem[0] = PATTERN (insn);
3066 PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
3067 add_clobbers (PATTERN (insn), INSN_CODE (insn));
3068 }
3069 if (INSN_CODE (insn) < 0)
3070 abort ();
3071 }
3072 else
3073 {
3074 new_body = old_body;
3075 if (! replace)
3076 {
3077 new_body = copy_insn (old_body);
3078 if (REG_NOTES (insn))
3079 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3080 }
3081 PATTERN (insn) = new_body;
3082 old_set = single_set (insn);
3083
3084 XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
3085 XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
3086 }
3087 val = 1;
3088 /* This can't have an effect on elimination offsets, so skip right
3089 to the end. */
3090 goto done;
3091 }
3092 }
3093
3094 /* Determine the effects of this insn on elimination offsets. */
3095 elimination_effects (old_body, 0);
3096
3097 /* Eliminate all eliminable registers occurring in operands that
3098 can be handled by reload. */
3099 extract_insn (insn);
3100 any_changes = 0;
3101 for (i = 0; i < recog_data.n_operands; i++)
3102 {
3103 orig_operand[i] = recog_data.operand[i];
3104 substed_operand[i] = recog_data.operand[i];
3105
3106 /* For an asm statement, every operand is eliminable. */
3107 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3108 {
3109 /* Check for setting a register that we know about. */
3110 if (recog_data.operand_type[i] != OP_IN
3111 && GET_CODE (orig_operand[i]) == REG)
3112 {
3113 /* If we are assigning to a register that can be eliminated, it
3114 must be as part of a PARALLEL, since the code above handles
3115 single SETs. We must indicate that we can no longer
3116 eliminate this reg. */
3117 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3118 ep++)
3119 if (ep->from_rtx == orig_operand[i] && ep->can_eliminate)
3120 ep->can_eliminate = 0;
3121 }
3122
3123 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3124 replace ? insn : NULL_RTX);
3125 if (substed_operand[i] != orig_operand[i])
3126 val = any_changes = 1;
3127 /* Terminate the search in check_eliminable_occurrences at
3128 this point. */
3129 *recog_data.operand_loc[i] = 0;
3130
3131 /* If an output operand changed from a REG to a MEM and INSN is an
3132 insn, write a CLOBBER insn. */
3133 if (recog_data.operand_type[i] != OP_IN
3134 && GET_CODE (orig_operand[i]) == REG
3135 && GET_CODE (substed_operand[i]) == MEM
3136 && replace)
3137 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3138 insn);
3139 }
3140 }
3141
3142 for (i = 0; i < recog_data.n_dups; i++)
3143 *recog_data.dup_loc[i]
3144 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3145
3146 /* If any eliminable registers still occur in the old body, they aren't eliminable anymore.  */
3147 check_eliminable_occurrences (old_body);
3148
3149 /* Substitute the operands; the new values are in the substed_operand
3150 array. */
3151 for (i = 0; i < recog_data.n_operands; i++)
3152 *recog_data.operand_loc[i] = substed_operand[i];
3153 for (i = 0; i < recog_data.n_dups; i++)
3154 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3155
3156 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3157 re-recognize the insn. We do this in case we had a simple addition
3158 but now can do this as a load-address. This saves an insn in this
3159 common case.
3160 If re-recognition fails, the old insn code number will still be used,
3161 and some register operands may have changed into PLUS expressions.
3162 These will be handled by find_reloads by loading them into a register
3163 again. */
3164
3165 if (val)
3166 {
3167 /* If we aren't replacing things permanently and we changed something,
3168 make another copy to ensure that all the RTL is new. Otherwise
3169 things can go wrong if find_reloads swaps commutative operands
3170 and one is inside RTL that has been copied while the other is not. */
3171 new_body = old_body;
3172 if (! replace)
3173 {
3174 new_body = copy_insn (old_body);
3175 if (REG_NOTES (insn))
3176 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3177 }
3178 PATTERN (insn) = new_body;
3179
3180 /* If we had a move insn but now we don't, rerecognize it. This will
3181 cause spurious re-recognition if the old move had a PARALLEL since
3182 the new one still will, but we can't call single_set without
3183 having put NEW_BODY into the insn and the re-recognition won't
3184 hurt in this rare case. */
3185 /* ??? Why this huge if statement - why don't we just rerecognize the
3186 thing always? */
3187 if (! insn_is_asm
3188 && old_set != 0
3189 && ((GET_CODE (SET_SRC (old_set)) == REG
3190 && (GET_CODE (new_body) != SET
3191 || GET_CODE (SET_SRC (new_body)) != REG))
3192 /* If this was a load from or store to memory, compare
3193 the MEM in recog_data.operand to the one in the insn.
3194 If they are not equal, then rerecognize the insn. */
3195 || (old_set != 0
3196 && ((GET_CODE (SET_SRC (old_set)) == MEM
3197 && SET_SRC (old_set) != recog_data.operand[1])
3198 || (GET_CODE (SET_DEST (old_set)) == MEM
3199 && SET_DEST (old_set) != recog_data.operand[0])))
3200 /* If this was an add insn before, rerecognize. */
3201 || GET_CODE (SET_SRC (old_set)) == PLUS))
3202 {
3203 int new_icode = recog (PATTERN (insn), insn, 0);
3204 if (new_icode < 0)
3205 INSN_CODE (insn) = icode;
3206 }
3207 }
3208
3209 /* Restore the old body. If there were any changes to it, we made a copy
3210 of it while the changes were still in place, so we'll correctly return
3211 a modified insn below. */
3212 if (! replace)
3213 {
3214 /* Restore the old body. */
3215 for (i = 0; i < recog_data.n_operands; i++)
3216 *recog_data.operand_loc[i] = orig_operand[i];
3217 for (i = 0; i < recog_data.n_dups; i++)
3218 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3219 }
3220
3221 /* Update all elimination pairs to reflect the status after the current
3222 insn. The changes we make were determined by the earlier call to
3223 elimination_effects.
3224
3225 We also detect cases where register elimination cannot be done,
3226 namely, if a register would be both changed and referenced outside a MEM
3227 in the resulting insn since such an insn is often undefined and, even if
3228 not, we cannot know what meaning will be given to it. Note that it is
3229 valid to have a register used in an address in an insn that changes it
3230 (presumably with a pre- or post-increment or decrement).
3231
3232 If anything changes, return nonzero. */
3233
3234 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3235 {
3236 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3237 ep->can_eliminate = 0;
3238
3239 ep->ref_outside_mem = 0;
3240
3241 if (ep->previous_offset != ep->offset)
3242 val = 1;
3243 }
3244
3245 done:
3246 /* If we changed something, perform elimination in REG_NOTES. This is
3247 needed even when REPLACE is zero because a REG_DEAD note might refer
3248 to a register that we eliminate and could cause a different number
3249 of spill registers to be needed in the final reload pass than in
3250 the pre-passes. */
3251 if (val && REG_NOTES (insn) != 0)
3252 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3253
3254 return val;
3255}
3256
3257/* Loop through all elimination pairs.
3258 Recalculate the number not at initial offset.
3259
3260 Compute the maximum offset (minimum offset if the stack does not
3261 grow downward) for each elimination pair. */
3262
3263static void
3264update_eliminable_offsets ()
3265{
3266 struct elim_table *ep;
3267
3268 num_not_at_initial_offset = 0;
3269 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3270 {
3271 ep->previous_offset = ep->offset;
3272 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3273 num_not_at_initial_offset++;
3274 }
3275}
3276
3277/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3278 replacement we currently believe is valid, mark it as not eliminable if X
3279 modifies DEST in any way other than by adding a constant integer to it.
3280
3281 If DEST is the hard frame pointer, we do nothing because we assume that
3282 all assignments to it are for nonlocal gotos and are being
3283 done at a time when they are valid and do not disturb anything else.
3284 Some machines want to eliminate a fake argument pointer with either the
3285 frame or stack pointer. Assignments to the hard frame pointer must not
3286 prevent this elimination.
3287
3288 Called via note_stores from reload before starting its passes to scan
3289 the insns of the function. */
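/* (So, illustratively, a plain stack adjustment such as
   (set (reg sp) (plus (reg sp) (const_int -16))) leaves eliminations into sp
   intact, while any other kind of store to an elimination target register
   disables that elimination.)  */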
3290
3291static void
3292mark_not_eliminable (dest, x, data)
3293 rtx dest;
3294 rtx x;
3295 void *data ATTRIBUTE_UNUSED;
3296{
3297 unsigned int i;
3298
3299 /* A SUBREG of a hard register here is just changing its mode. We should
3300 not see a SUBREG of an eliminable hard register, but check just in
3301 case. */
3302 if (GET_CODE (dest) == SUBREG)
3303 dest = SUBREG_REG (dest);
3304
3305 if (dest == hard_frame_pointer_rtx)
3306 return;
3307
3308 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3309 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3310 && (GET_CODE (x) != SET
3311 || GET_CODE (SET_SRC (x)) != PLUS
3312 || XEXP (SET_SRC (x), 0) != dest
3313 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3314 {
3315 reg_eliminate[i].can_eliminate_previous
3316 = reg_eliminate[i].can_eliminate = 0;
3317 num_eliminable--;
3318 }
3319}
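
/* Illustration of the test above (hypothetical RTL, not taken from any
   particular port): a store such as
       (set (reg:SI 6) (plus:SI (reg:SI 6) (const_int -16)))
   where reg 6 is the to_rtx of some elimination leaves that elimination
   valid, because only a constant integer is added; a store such as
       (set (reg:SI 6) (reg:SI 3))
   matches none of the allowed forms and therefore clears can_eliminate
   (and can_eliminate_previous) for every pair whose to_rtx is reg 6.  */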
3320
3321/* Verify that the initial elimination offsets did not change since the
3322 last call to set_initial_elim_offsets. This is used to catch cases
3323 where something illegal happened during reload_as_needed that could
3324 cause incorrect code to be generated if we did not check for it. */
3325
3326static void
3327verify_initial_elim_offsets ()
3328{
3329 int t;
3330
3331#ifdef ELIMINABLE_REGS
3332 struct elim_table *ep;
3333
3334 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3335 {
3336 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3337 if (t != ep->initial_offset)
3338 abort ();
3339 }
3340#else
3341 INITIAL_FRAME_POINTER_OFFSET (t);
3342 if (t != reg_eliminate[0].initial_offset)
3343 abort ();
3344#endif
3345}
3346
3347/* Reset all offsets on eliminable registers to their initial values. */
3348
3349static void
3350set_initial_elim_offsets ()
3351{
3352 struct elim_table *ep = reg_eliminate;
3353
3354#ifdef ELIMINABLE_REGS
3355 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3356 {
3357 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3358 ep->previous_offset = ep->offset = ep->initial_offset;
3359 }
3360#else
3361 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3362 ep->previous_offset = ep->offset = ep->initial_offset;
3363#endif
3364
3365 num_not_at_initial_offset = 0;
3366}
3367
3368/* Initialize the known label offsets.
3369 Set a known offset for each forced label to be at the initial offset
3370 of each elimination. We do this because we assume that all
3371 computed jumps occur from a location where each elimination is
3372 at its initial offset.
3373 For all other labels, show that we don't know the offsets. */
3374
3375static void
3376set_initial_label_offsets ()
3377{
3378 rtx x;
3379 memset ((char *) &offsets_known_at[get_first_label_num ()], 0, num_labels);
3380
3381 for (x = forced_labels; x; x = XEXP (x, 1))
3382 if (XEXP (x, 0))
3383 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3384}
3385
3386/* Set all elimination offsets to the known values for the code label given
3387 by INSN. */
3388
3389static void
3390set_offsets_for_label (insn)
3391 rtx insn;
3392{
3393 unsigned int i;
3394 int label_nr = CODE_LABEL_NUMBER (insn);
3395 struct elim_table *ep;
3396
3397 num_not_at_initial_offset = 0;
3398 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3399 {
3400 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3401 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3402 num_not_at_initial_offset++;
3403 }
3404}
3405
3406/* See if anything that happened changes which eliminations are valid.
3407 For example, on the Sparc, whether or not the frame pointer can
3408 be eliminated can depend on what registers have been used. We need
3409 not check some conditions again (such as flag_omit_frame_pointer)
3410 since they can't have changed. */
3411
3412static void
3413update_eliminables (pset)
3414 HARD_REG_SET *pset;
3415{
3416#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3417 int previous_frame_pointer_needed = frame_pointer_needed;
3418#endif
3419 struct elim_table *ep;
3420
3421 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3422 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3423#ifdef ELIMINABLE_REGS
3424 || ! CAN_ELIMINATE (ep->from, ep->to)
3425#endif
3426 )
3427 ep->can_eliminate = 0;
3428
3429 /* Look for the case where we have discovered that we can't replace
3430 register A with register B and that means that we will now be
3431 trying to replace register A with register C. This means we can
3432 no longer replace register C with register B and we need to disable
3433 such an elimination, if it exists. This occurs often with A == ap,
3434 B == sp, and C == fp. */
3435
3436 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3437 {
3438 struct elim_table *op;
3439 int new_to = -1;
3440
3441 if (! ep->can_eliminate && ep->can_eliminate_previous)
3442 {
3443 /* Find the current elimination for ep->from, if there is a
3444 new one. */
3445 for (op = reg_eliminate;
3446 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3447 if (op->from == ep->from && op->can_eliminate)
3448 {
3449 new_to = op->to;
3450 break;
3451 }
3452
3453 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3454 disable it. */
3455 for (op = reg_eliminate;
3456 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3457 if (op->from == new_to && op->to == ep->to)
3458 op->can_eliminate = 0;
3459 }
3460 }
3461
3462 /* See if any registers that we thought we could eliminate the previous
3463 time are no longer eliminable. If so, something has changed and we
3464 must spill the register. Also, recompute the number of eliminable
3465 registers and see if the frame pointer is needed; it is if there is
3466 no elimination of the frame pointer that we can perform. */
3467
3468 frame_pointer_needed = 1;
3469 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3470 {
3471 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3472 && ep->to != HARD_FRAME_POINTER_REGNUM)
3473 frame_pointer_needed = 0;
3474
3475 if (! ep->can_eliminate && ep->can_eliminate_previous)
3476 {
3477 ep->can_eliminate_previous = 0;
3478 SET_HARD_REG_BIT (*pset, ep->from);
3479 num_eliminable--;
3480 }
3481 }
3482
3483#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3484 /* If we didn't need a frame pointer last time, but we do now, spill
3485 the hard frame pointer. */
3486 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3487 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3488#endif
3489}
3490
3491/* Initialize the table of registers to eliminate. */
3492
3493static void
3494init_elim_table ()
3495{
3496 struct elim_table *ep;
3497#ifdef ELIMINABLE_REGS
3498 const struct elim_table_1 *ep1;
3499#endif
3500
3501 if (!reg_eliminate)
3502 reg_eliminate = (struct elim_table *)
3503 xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3504
3505 /* Does this function require a frame pointer? */
3506
3507 frame_pointer_needed = (! flag_omit_frame_pointer
3508#ifdef EXIT_IGNORE_STACK
3509 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3510 and restore sp for alloca. So we can't eliminate
3511 the frame pointer in that case. At some point,
3512 we should improve this by emitting the
3513 sp-adjusting insns for this case. */
3514 || (current_function_calls_alloca
3515 && EXIT_IGNORE_STACK)
3516#endif
3517 || FRAME_POINTER_REQUIRED);
3518
3519 num_eliminable = 0;
3520
3521#ifdef ELIMINABLE_REGS
3522 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3523 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3524 {
3525 ep->from = ep1->from;
3526 ep->to = ep1->to;
3527 ep->can_eliminate = ep->can_eliminate_previous
3528 = (CAN_ELIMINATE (ep->from, ep->to)
3529 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3530 }
3531#else
3532 reg_eliminate[0].from = reg_eliminate_1[0].from;
3533 reg_eliminate[0].to = reg_eliminate_1[0].to;
3534 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3535 = ! frame_pointer_needed;
3536#endif
3537
3538 /* Count the number of eliminable registers and build the FROM and TO
3539 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3540 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3541 We depend on this. */
3542 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3543 {
3544 num_eliminable += ep->can_eliminate;
3545 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3546 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3547 }
3548}
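
/* For illustration only: on targets that define ELIMINABLE_REGS, the macro
   is typically an initializer of (from, to) pairs in the target's tm.h,
   roughly of the following shape (hypothetical, not any specific port):

     #define ELIMINABLE_REGS                                    \
     {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },           \
      { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },      \
      { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },           \
      { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   reg_eliminate_1 is built from that table, and the loop above copies it
   into reg_eliminate, filling in the per-function can_eliminate flags.  */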
3549
3550/* Kick all pseudos out of hard register REGNO.
3551
3552 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3553 because we found we can't eliminate some register. In that case, no pseudos
3554 are allowed to be in the register, even if they are only in a block that
3555 doesn't require spill registers, unlike the case when we are spilling this
3556 hard reg to produce another spill register.
3557
3558 Any pseudos that need to be kicked out are recorded in spilled_pseudos. */
3559
3560static void
3561spill_hard_reg (regno, cant_eliminate)
3562 unsigned int regno;
3563 int cant_eliminate;
3564{
3565 int i;
3566
3567 if (cant_eliminate)
3568 {
3569 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3570 regs_ever_live[regno] = 1;
3571 }
3572
3573 /* Spill every pseudo reg that was allocated to this reg
3574 or to something that overlaps this reg. */
3575
3576 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3577 if (reg_renumber[i] >= 0
3578 && (unsigned int) reg_renumber[i] <= regno
3579 && ((unsigned int) reg_renumber[i]
3580 + HARD_REGNO_NREGS ((unsigned int) reg_renumber[i],
3581 PSEUDO_REGNO_MODE (i))
3582 > regno))
3583 SET_REGNO_REG_SET (&spilled_pseudos, i);
3584}
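
/* A small worked example of the overlap test above (assumed values, for
   illustration only): if pseudo 200 was allocated to hard reg 4 and its
   mode needs two hard registers, it occupies regs 4 and 5; a call with
   REGNO == 5 then finds reg_renumber[200] == 4 <= 5 and 4 + 2 > 5, so
   pseudo 200 is added to spilled_pseudos even though its first register
   is not REGNO itself.  */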
3585
3586/* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3587 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3588
3589static void
3590ior_hard_reg_set (set1, set2)
3591 HARD_REG_SET *set1, *set2;
3592{
3593 IOR_HARD_REG_SET (*set1, *set2);
3594}
3595
3596/* After find_reload_regs has been run for all insns that need reloads,
3597 and/or spill_hard_regs was called, this function is used to actually
3598 spill pseudo registers and try to reallocate them. It also sets up the
3599 spill_regs array for use by choose_reload_regs. */
3600
3601static int
3602finish_spills (global)
3603 int global;
3604{
3605 struct insn_chain *chain;
3606 int something_changed = 0;
3607 int i;
3608
3609 /* Build the spill_regs array for the function. */
3610 /* If there are some registers still to eliminate and one of the spill regs
3611 wasn't ever used before, additional stack space may have to be
3612 allocated to store this register. Thus, we may have changed the offset
3613 between the stack and frame pointers, so mark that something has changed.
3614
3615 One might think that we need only set SOMETHING_CHANGED if this is a call-used
3616 register. However, the set of registers that must be saved by the
3617 prologue is not identical to the call-used set. For example, the
3618 register used by the call insn for the return PC is a call-used register,
3619 but must be saved by the prologue. */
3620
3621 n_spills = 0;
3622 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3623 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3624 {
3625 spill_reg_order[i] = n_spills;
3626 spill_regs[n_spills++] = i;
3627 if (num_eliminable && ! regs_ever_live[i])
3628 something_changed = 1;
3629 regs_ever_live[i] = 1;
3630 }
3631 else
3632 spill_reg_order[i] = -1;
3633
3634 EXECUTE_IF_SET_IN_REG_SET
3635 (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i,
3636 {
3637 /* Record the current hard register the pseudo is allocated to in
3638 pseudo_previous_regs so we avoid reallocating it to the same
3639 hard reg in a later pass. */
3640 if (reg_renumber[i] < 0)
3641 abort ();
3642
3643 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3644 /* Mark it as no longer having a hard register home. */
3645 reg_renumber[i] = -1;
3646 /* We will need to scan everything again. */
3647 something_changed = 1;
3648 });
3649
3650 /* Retry global register allocation if possible. */
3651 if (global)
3652 {
3653 memset ((char *) pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3654 /* For every insn that needs reloads, set the registers used as spill
3655 regs in pseudo_forbidden_regs for every pseudo live across the
3656 insn. */
3657 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3658 {
3659 EXECUTE_IF_SET_IN_REG_SET
3660 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
3661 {
3662 ior_hard_reg_set (pseudo_forbidden_regs + i,
3663 &chain->used_spill_regs);
3664 });
3665 EXECUTE_IF_SET_IN_REG_SET
3666 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
3667 {
3668 ior_hard_reg_set (pseudo_forbidden_regs + i,
3669 &chain->used_spill_regs);
3670 });
3671 }
3672
3673 /* Retry allocating the spilled pseudos. For each reg, merge the
3674 various reg sets that indicate which hard regs can't be used,
3675 and call retry_global_alloc.
3676 We change spill_pseudos here to only contain pseudos that did not
3677 get a new hard register. */
3678 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3679 if (reg_old_renumber[i] != reg_renumber[i])
3680 {
3681 HARD_REG_SET forbidden;
3682 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3683 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3684 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3685 retry_global_alloc (i, forbidden);
3686 if (reg_renumber[i] >= 0)
3687 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3688 }
3689 }
3690
3691 /* Fix up the register information in the insn chain.
3692 This involves deleting those of the spilled pseudos which did not get
3693 a new hard register home from the live_{before,after} sets. */
3694 for (chain = reload_insn_chain; chain; chain = chain->next)
3695 {
3696 HARD_REG_SET used_by_pseudos;
3697 HARD_REG_SET used_by_pseudos2;
3698
3699 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3700 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3701
3702 /* Mark any unallocated hard regs as available for spills. That
3703 makes inheritance work somewhat better. */
3704 if (chain->need_reload)
3705 {
3706 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3707 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3708 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3709
3710 /* Save the old value for the sanity test below. */
3711 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3712
3713 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3714 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3715 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3716 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3717
3718 /* Make sure we only enlarge the set. */
3719 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3720 abort ();
3721 ok:;
3722 }
3723 }
3724
3725 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3726 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3727 {
3728 int regno = reg_renumber[i];
3729 if (reg_old_renumber[i] == regno)
3730 continue;
3731
3732 alter_reg (i, reg_old_renumber[i]);
3733 reg_old_renumber[i] = regno;
3734 if (rtl_dump_file)
3735 {
3736 if (regno == -1)
3737 fprintf (rtl_dump_file, " Register %d now on stack.\n\n", i);
3738 else
3739 fprintf (rtl_dump_file, " Register %d now in %d.\n\n",
3740 i, reg_renumber[i]);
3741 }
3742 }
3743
3744 return something_changed;
3745}
3746
3747/* Find all paradoxical subregs within X and update reg_max_ref_width.
3748 Also mark any hard registers used to store user variables as
3749 forbidden from being used for spill registers; that part is currently
 disabled by the #if 0 below. */
3750
3751static void
3752scan_paradoxical_subregs (x)
3753 rtx x;
3754{
3755 int i;
3756 const char *fmt;
3757 enum rtx_code code = GET_CODE (x);
3758
3759 switch (code)
3760 {
3761 case REG:
3762#if 0
3763 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3764 && REG_USERVAR_P (x))
3765 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
3766#endif
3767 return;
3768
3769 case CONST_INT:
3770 case CONST:
3771 case SYMBOL_REF:
3772 case LABEL_REF:
3773 case CONST_DOUBLE:
3774 case CONST_VECTOR: /* shouldn't happen, but just in case. */
3775 case CC0:
3776 case PC:
3777 case USE:
3778 case CLOBBER:
3779 return;
3780
3781 case SUBREG:
3782 if (GET_CODE (SUBREG_REG (x)) == REG
3783 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3784 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3785 = GET_MODE_SIZE (GET_MODE (x));
3786 return;
3787
3788 default:
3789 break;
3790 }
3791
3792 fmt = GET_RTX_FORMAT (code);
3793 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3794 {
3795 if (fmt[i] == 'e')
3796 scan_paradoxical_subregs (XEXP (x, i));
3797 else if (fmt[i] == 'E')
3798 {
3799 int j;
3800 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3801 scan_paradoxical_subregs (XVECEXP (x, i, j));
3802 }
3803 }
3804}
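
/* Example of what this records (hypothetical pseudo number): a reference
   like (subreg:SI (reg:QI 123) 0) is paradoxical because SImode is wider
   than the QImode of the inner pseudo; the code above then makes
   reg_max_ref_width[123] at least GET_MODE_SIZE (SImode), so that if the
   pseudo later ends up in memory its stack slot is wide enough for the
   SImode access.  */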
3805
3806/* Reload pseudo-registers into hard regs around each insn as needed.
3807 Additional register load insns are output before the insn that needs it
3808 and perhaps store insns after insns that modify the reloaded pseudo reg.
3809
3810 reg_last_reload_reg and reg_reloaded_contents keep track of
3811 which registers are already available in reload registers.
3812 We update these for the reloads that we perform,
3813 as the insns are scanned. */
3814
3815static void
3816reload_as_needed (live_known)
3817 int live_known;
3818{
3819 struct insn_chain *chain;
3820#if defined (AUTO_INC_DEC)
3821 int i;
3822#endif
3823 rtx x;
3824
3825 memset ((char *) spill_reg_rtx, 0, sizeof spill_reg_rtx);
3826 memset ((char *) spill_reg_store, 0, sizeof spill_reg_store);
3827 reg_last_reload_reg = (rtx *) xcalloc (max_regno, sizeof (rtx));
3828 reg_has_output_reload = (char *) xmalloc (max_regno);
3829 CLEAR_HARD_REG_SET (reg_reloaded_valid);
3830
3831 set_initial_elim_offsets ();
3832
3833 for (chain = reload_insn_chain; chain; chain = chain->next)
3834 {
3835 rtx prev;
3836 rtx insn = chain->insn;
3837 rtx old_next = NEXT_INSN (insn);
3838
3839 /* If we pass a label, copy the offsets from the label information
3840 into the current offsets of each elimination. */
3841 if (GET_CODE (insn) == CODE_LABEL)
3842 set_offsets_for_label (insn);
3843
3844 else if (INSN_P (insn))
3845 {
3846 rtx oldpat = PATTERN (insn);
3847
3848 /* If this is a USE and CLOBBER of a MEM, ensure that any
3849 references to eliminable registers have been removed. */
3850
3851 if ((GET_CODE (PATTERN (insn)) == USE
3852 || GET_CODE (PATTERN (insn)) == CLOBBER)
3853 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3854 XEXP (XEXP (PATTERN (insn), 0), 0)
3855 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3856 GET_MODE (XEXP (PATTERN (insn), 0)),
3857 NULL_RTX);
3858
3859 /* If we need to do register elimination processing, do so.
3860 This might delete the insn, in which case we are done. */
3861 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3862 {
3863 eliminate_regs_in_insn (insn, 1);
3864 if (GET_CODE (insn) == NOTE)
3865 {
3866 update_eliminable_offsets ();
3867 continue;
3868 }
3869 }
3870
3871 /* If need_elim is nonzero but need_reload is zero, one might think
3872 that we could simply set n_reloads to 0. However, find_reloads
3873 could have done some manipulation of the insn (such as swapping
3874 commutative operands), and these manipulations are lost during
3875 the first pass for every insn that needs register elimination.
3876 So the actions of find_reloads must be redone here. */
3877
3878 if (! chain->need_elim && ! chain->need_reload
3879 && ! chain->need_operand_change)
3880 n_reloads = 0;
3881 /* First find the pseudo regs that must be reloaded for this insn.
3882 This info is returned in the tables reload_... (see reload.h).
3883 Also modify the body of INSN by substituting RELOAD
3884 rtx's for those pseudo regs. */
3885 else
3886 {
3887 memset (reg_has_output_reload, 0, max_regno);
3888 CLEAR_HARD_REG_SET (reg_is_output_reload);
3889
3890 find_reloads (insn, 1, spill_indirect_levels, live_known,
3891 spill_reg_order);
3892 }
3893
3894 if (n_reloads > 0)
3895 {
3896 rtx next = NEXT_INSN (insn);
3897 rtx p;
3898
3899 prev = PREV_INSN (insn);
3900
3901 /* Now compute which reload regs to reload them into. Perhaps
3902 reusing reload regs from previous insns, or else output
3903 load insns to reload them. Maybe output store insns too.
3904 Record the choices of reload reg in reload_reg_rtx. */
3905 choose_reload_regs (chain);
3906
3907 /* Merge any reloads that we didn't combine for fear of
3908 increasing the number of spill registers needed but now
3909 discover can be safely merged. */
3910 if (SMALL_REGISTER_CLASSES)
3911 merge_assigned_reloads (insn);
3912
3913 /* Generate the insns to reload operands into or out of
3914 their reload regs. */
3915 emit_reload_insns (chain);
3916
3917 /* Substitute the chosen reload regs from reload_reg_rtx
3918 into the insn's body (or perhaps into the bodies of other
3919 load and store insns that we just made for reloading
3920 and that we moved the structure into). */
3921 subst_reloads (insn);
3922
3923 /* If this was an ASM, make sure that all the reload insns
3924 we have generated are valid. If not, give an error
3925 and delete them. */
3926
3927 if (asm_noperands (PATTERN (insn)) >= 0)
3928 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3929 if (p != insn && INSN_P (p)
3930 && (recog_memoized (p) < 0
3931 || (extract_insn (p), ! constrain_operands (1))))
3932 {
3933 error_for_asm (insn,
3934 "`asm' operand requires impossible reload");
3935 delete_insn (p);
3936 }
3937 }
3938
3939 if (num_eliminable && chain->need_elim)
3940 update_eliminable_offsets ();
3941
3942 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3943 is no longer validly lying around to save a future reload.
3944 Note that this does not detect pseudos that were reloaded
3945 for this insn in order to be stored in
3946 (obeying register constraints). That is correct; such reload
3947 registers ARE still valid. */
3948 note_stores (oldpat, forget_old_reloads_1, NULL);
3949
3950 /* There may have been CLOBBER insns placed after INSN. So scan
3951 between INSN and NEXT and use them to forget old reloads. */
3952 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
3953 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3954 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
3955
3956#ifdef AUTO_INC_DEC
3957 /* Likewise for regs altered by auto-increment in this insn.
3958 REG_INC notes have been changed by reloading:
3959 find_reloads_address_1 records substitutions for them,
3960 which have been performed by subst_reloads above. */
3961 for (i = n_reloads - 1; i >= 0; i--)
3962 {
3963 rtx in_reg = rld[i].in_reg;
3964 if (in_reg)
3965 {
3966 enum rtx_code code = GET_CODE (in_reg);
3967 /* PRE_INC / PRE_DEC will have the reload register ending up
3968 with the same value as the stack slot, but that doesn't
3969 hold true for POST_INC / POST_DEC. Either we have to
3970 convert the memory access to a true POST_INC / POST_DEC,
3971 or we can't use the reload register for inheritance. */
3972 if ((code == POST_INC || code == POST_DEC)
3973 && TEST_HARD_REG_BIT (reg_reloaded_valid,
3974 REGNO (rld[i].reg_rtx))
3975 /* Make sure it is the inc/dec pseudo, and not
3976 some other (e.g. output operand) pseudo. */
3977 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
3978 == REGNO (XEXP (in_reg, 0))))
3979
3980 {
3981 rtx reload_reg = rld[i].reg_rtx;
3982 enum machine_mode mode = GET_MODE (reload_reg);
3983 int n = 0;
3984 rtx p;
3985
3986 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
3987 {
3988 /* We really want to ignore REG_INC notes here, so
3989 use PATTERN (p) as argument to reg_set_p . */
3990 if (reg_set_p (reload_reg, PATTERN (p)))
3991 break;
3992 n = count_occurrences (PATTERN (p), reload_reg, 0);
3993 if (! n)
3994 continue;
3995 if (n == 1)
3996 {
3997 n = validate_replace_rtx (reload_reg,
3998 gen_rtx (code, mode,
3999 reload_reg),
4000 p);
4001
4002 /* We must also verify that the constraints
4003 are met after the replacement. */
4004 extract_insn (p);
4005 if (n)
4006 n = constrain_operands (1);
4007 else
4008 break;
4009
4010 /* If the constraints were not met, then
4011 undo the replacement. */
4012 if (!n)
4013 {
4014 validate_replace_rtx (gen_rtx (code, mode,
4015 reload_reg),
4016 reload_reg, p);
4017 break;
4018 }
4019
4020 }
4021 break;
4022 }
4023 if (n == 1)
4024 {
4025 REG_NOTES (p)
4026 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4027 REG_NOTES (p));
4028 /* Mark this as having an output reload so that the
4029 REG_INC processing code below won't invalidate
4030 the reload for inheritance. */
4031 SET_HARD_REG_BIT (reg_is_output_reload,
4032 REGNO (reload_reg));
4033 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4034 }
4035 else
4036 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4037 NULL);
4038 }
4039 else if ((code == PRE_INC || code == PRE_DEC)
4040 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4041 REGNO (rld[i].reg_rtx))
4042 /* Make sure it is the inc/dec pseudo, and not
4043 some other (e.g. output operand) pseudo. */
4044 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4045 == REGNO (XEXP (in_reg, 0))))
4046 {
4047 SET_HARD_REG_BIT (reg_is_output_reload,
4048 REGNO (rld[i].reg_rtx));
4049 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4050 }
4051 }
4052 }
4053 /* If a pseudo that got a hard register is auto-incremented,
4054 we must purge records of copying it into pseudos without
4055 hard registers. */
4056 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4057 if (REG_NOTE_KIND (x) == REG_INC)
4058 {
4059 /* See if this pseudo reg was reloaded in this insn.
4060 If so, its last-reload info is still valid
4061 because it is based on this insn's reload. */
4062 for (i = 0; i < n_reloads; i++)
4063 if (rld[i].out == XEXP (x, 0))
4064 break;
4065
4066 if (i == n_reloads)
4067 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4068 }
4069#endif
4070 }
4071 /* A reload reg's contents are unknown after a label. */
4072 if (GET_CODE (insn) == CODE_LABEL)
4073 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4074
4075 /* Don't assume a reload reg is still good after a call insn
4076 if it is a call-used reg. */
4077 else if (GET_CODE (insn) == CALL_INSN)
4078 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4079 }
4080
4081 /* Clean up. */
4082 free (reg_last_reload_reg);
4083 free (reg_has_output_reload);
4084}
4085
4086/* Discard all record of any value reloaded from X,
4087 or reloaded in X from someplace else;
4088 unless X is an output reload reg of the current insn.
4089
4090 X may be a hard reg (the reload reg)
4091 or it may be a pseudo reg that was reloaded from. */
4092
4093static void
4094forget_old_reloads_1 (x, ignored, data)
4095 rtx x;
4096 rtx ignored ATTRIBUTE_UNUSED;
4097 void *data ATTRIBUTE_UNUSED;
4098{
4099 unsigned int regno;
4100 unsigned int nr;
4101 int offset = 0;
4102
4103 /* note_stores does give us subregs of hard regs;
4104 subreg_regno_offset will abort if it is not a hard reg. */
4105 while (GET_CODE (x) == SUBREG)
4106 {
4107 offset += subreg_regno_offset (REGNO (SUBREG_REG (x)),
4108 GET_MODE (SUBREG_REG (x)),
4109 SUBREG_BYTE (x),
4110 GET_MODE (x));
4111 x = SUBREG_REG (x);
4112 }
4113
4114 if (GET_CODE (x) != REG)
4115 return;
4116
4117 regno = REGNO (x) + offset;
4118
4119 if (regno >= FIRST_PSEUDO_REGISTER)
4120 nr = 1;
4121 else
4122 {
4123 unsigned int i;
4124
4125 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4126 /* Storing into a spilled-reg invalidates its contents.
4127 This can happen if a block-local pseudo is allocated to that reg
4128 and it wasn't spilled because this block's total need is 0.
4129 Then some insn might have an optional reload and use this reg. */
4130 for (i = 0; i < nr; i++)
4131 /* But don't do this if the reg actually serves as an output
4132 reload reg in the current instruction. */
4133 if (n_reloads == 0
4134 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4135 {
4136 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4137 spill_reg_store[regno + i] = 0;
4138 }
4139 }
4140
4141 /* Since value of X has changed,
4142 forget any value previously copied from it. */
4143
4144 while (nr-- > 0)
4145 /* But don't forget a copy if this is the output reload
4146 that establishes the copy's validity. */
4147 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4148 reg_last_reload_reg[regno + nr] = 0;
4149}
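
/* For instance (assumed register numbers, illustration only): if hard reg
   5 currently holds a reloaded value (its bit is set in reg_reloaded_valid)
   and the insn stores into reg 5 other than as one of its own output
   reloads, the code above clears that bit and spill_reg_store[5], and also
   clears reg_last_reload_reg[5], so later insns will not try to inherit
   the stale value.  */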
4150
4151/* The following HARD_REG_SETs indicate when each hard register is
4152 used for a reload of various parts of the current insn. */
4153
4154/* If reg is unavailable for all reloads. */
4155static HARD_REG_SET reload_reg_unavailable;
4156/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4157static HARD_REG_SET reload_reg_used;
4158/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4159static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4160/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4161static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4162/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4163static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4164/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4165static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4166/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4167static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4168/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4169static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4170/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4171static HARD_REG_SET reload_reg_used_in_op_addr;
4172/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4173static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4174/* If reg is in use for a RELOAD_FOR_INSN reload. */
4175static HARD_REG_SET reload_reg_used_in_insn;
4176/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4177static HARD_REG_SET reload_reg_used_in_other_addr;
4178
4179/* If reg is in use as a reload reg for any sort of reload. */
4180static HARD_REG_SET reload_reg_used_at_all;
4181
4182/* If reg is in use as an inherited reload. We just mark the first register
4183 in the group. */
4184static HARD_REG_SET reload_reg_used_for_inherit;
4185
4186/* Records which hard regs are used in any way, either as explicit use or
4187 by being allocated to a pseudo during any point of the current insn. */
4188static HARD_REG_SET reg_used_in_insn;
4189
4190/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4191 TYPE. MODE is used to indicate how many consecutive regs are
4192 actually used. */
4193
4194static void
4195mark_reload_reg_in_use (regno, opnum, type, mode)
4196 unsigned int regno;
4197 int opnum;
4198 enum reload_type type;
4199 enum machine_mode mode;
4200{
4201 unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4202 unsigned int i;
4203
4204 for (i = regno; i < nregs + regno; i++)
4205 {
4206 switch (type)
4207 {
4208 case RELOAD_OTHER:
4209 SET_HARD_REG_BIT (reload_reg_used, i);
4210 break;
4211
4212 case RELOAD_FOR_INPUT_ADDRESS:
4213 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4214 break;
4215
4216 case RELOAD_FOR_INPADDR_ADDRESS:
4217 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4218 break;
4219
4220 case RELOAD_FOR_OUTPUT_ADDRESS:
4221 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4222 break;
4223
4224 case RELOAD_FOR_OUTADDR_ADDRESS:
4225 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4226 break;
4227
4228 case RELOAD_FOR_OPERAND_ADDRESS:
4229 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4230 break;
4231
4232 case RELOAD_FOR_OPADDR_ADDR:
4233 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4234 break;
4235
4236 case RELOAD_FOR_OTHER_ADDRESS:
4237 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4238 break;
4239
4240 case RELOAD_FOR_INPUT:
4241 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4242 break;
4243
4244 case RELOAD_FOR_OUTPUT:
4245 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4246 break;
4247
4248 case RELOAD_FOR_INSN:
4249 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4250 break;
4251 }
4252
4253 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4254 }
4255}
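
/* As an example (assumed values, illustration only): marking regno 8 for a
   RELOAD_FOR_INPUT of operand 2 in a mode that needs two hard registers
   sets bits 8 and 9 in reload_reg_used_in_input[2] and in
   reload_reg_used_at_all; the number of consecutive registers comes from
   HARD_REGNO_NREGS (regno, mode).  */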
4256
4257/* Similarly, but show REGNO is no longer in use for a reload. */
4258
4259static void
4260clear_reload_reg_in_use (regno, opnum, type, mode)
4261 unsigned int regno;
4262 int opnum;
4263 enum reload_type type;
4264 enum machine_mode mode;
4265{
4266 unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4267 unsigned int start_regno, end_regno, r;
4268 int i;
4269 /* A complication is that for some reload types, inheritance might
4270 allow multiple reloads of the same type to share a reload register.
4271 We set check_opnum if we have to check only reloads with the same
4272 operand number, and check_any if we have to check all reloads. */
4273 int check_opnum = 0;
4274 int check_any = 0;
4275 HARD_REG_SET *used_in_set;
4276
4277 switch (type)
4278 {
4279 case RELOAD_OTHER:
4280 used_in_set = &reload_reg_used;
4281 break;
4282
4283 case RELOAD_FOR_INPUT_ADDRESS:
4284 used_in_set = &reload_reg_used_in_input_addr[opnum];
4285 break;
4286
4287 case RELOAD_FOR_INPADDR_ADDRESS:
4288 check_opnum = 1;
4289 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4290 break;
4291
4292 case RELOAD_FOR_OUTPUT_ADDRESS:
4293 used_in_set = &reload_reg_used_in_output_addr[opnum];
4294 break;
4295
4296 case RELOAD_FOR_OUTADDR_ADDRESS:
4297 check_opnum = 1;
4298 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4299 break;
4300
4301 case RELOAD_FOR_OPERAND_ADDRESS:
4302 used_in_set = &reload_reg_used_in_op_addr;
4303 break;
4304
4305 case RELOAD_FOR_OPADDR_ADDR:
4306 check_any = 1;
4307 used_in_set = &reload_reg_used_in_op_addr_reload;
4308 break;
4309
4310 case RELOAD_FOR_OTHER_ADDRESS:
4311 used_in_set = &reload_reg_used_in_other_addr;
4312 check_any = 1;
4313 break;
4314
4315 case RELOAD_FOR_INPUT:
4316 used_in_set = &reload_reg_used_in_input[opnum];
4317 break;
4318
4319 case RELOAD_FOR_OUTPUT:
4320 used_in_set = &reload_reg_used_in_output[opnum];
4321 break;
4322
4323 case RELOAD_FOR_INSN:
4324 used_in_set = &reload_reg_used_in_insn;
4325 break;
4326 default:
4327 abort ();
4328 }
4329 /* We resolve conflicts with remaining reloads of the same type by
4330 excluding the intervals of reload registers used by them from the
4331 interval of freed reload registers. Since we only keep track of
4332 one set of interval bounds, we might have to exclude somewhat
4333 more than what would be necessary if we used a HARD_REG_SET here.
4334 But this should only happen very infrequently, so there should
4335 be no reason to worry about it. */
4336
4337 start_regno = regno;
4338 end_regno = regno + nregs;
4339 if (check_opnum || check_any)
4340 {
4341 for (i = n_reloads - 1; i >= 0; i--)
4342 {
4343 if (rld[i].when_needed == type
4344 && (check_any || rld[i].opnum == opnum)
4345 && rld[i].reg_rtx)
4346 {
4347 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4348 unsigned int conflict_end
4349 = (conflict_start
4350 + HARD_REGNO_NREGS (conflict_start, rld[i].mode));
4351
4352 /* If there is an overlap with the first to-be-freed register,
4353 adjust the interval start. */
4354 if (conflict_start <= start_regno && conflict_end > start_regno)
4355 start_regno = conflict_end;
4356 /* Otherwise, if there is a conflict with one of the other
4357 to-be-freed registers, adjust the interval end. */
4358 if (conflict_start > start_regno && conflict_start < end_regno)
4359 end_regno = conflict_start;
4360 }
4361 }
4362 }
4363
4364 for (r = start_regno; r < end_regno; r++)
4365 CLEAR_HARD_REG_BIT (*used_in_set, r);
4366}
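
/* Interval illustration (assumed register numbers): suppose we are freeing
   regs 8..11 and another not-yet-freed reload of the same type (and, when
   check_opnum, of the same operand) still has its reg_rtx in regs 10..11.
   The loop above moves end_regno down to 10, so only bits 8 and 9 are
   cleared and the conflicting registers stay marked as in use.  */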
4367
4368/* 1 if reg REGNO is free as a reload reg for a reload of the sort
4369 specified by OPNUM and TYPE. */
4370
4371static int
4372reload_reg_free_p (regno, opnum, type)
4373 unsigned int regno;
4374 int opnum;
4375 enum reload_type type;
4376{
4377 int i;
4378
4379 /* In use for a RELOAD_OTHER means it's not available for anything. */
4380 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4381 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4382 return 0;
4383
4384 switch (type)
4385 {
4386 case RELOAD_OTHER:
4387 /* In use for anything means we can't use it for RELOAD_OTHER. */
4388 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4389 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4390 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4391 return 0;
4392
4393 for (i = 0; i < reload_n_operands; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4395 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4396 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4397 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4398 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4399 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4400 return 0;
4401
4402 return 1;
4403
4404 case RELOAD_FOR_INPUT:
4405 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4406 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4407 return 0;
4408
4409 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4410 return 0;
4411
4412 /* If it is used for some other input, can't use it. */
4413 for (i = 0; i < reload_n_operands; i++)
4414 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4415 return 0;
4416
4417 /* If it is used in a later operand's address, can't use it. */
4418 for (i = opnum + 1; i < reload_n_operands; i++)
4419 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4420 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4421 return 0;
4422
4423 return 1;
4424
4425 case RELOAD_FOR_INPUT_ADDRESS:
4426 /* Can't use a register if it is used for an input address for this
4427 operand or used as an input in an earlier one. */
4428 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4429 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4430 return 0;
4431
4432 for (i = 0; i < opnum; i++)
4433 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4434 return 0;
4435
4436 return 1;
4437
4438 case RELOAD_FOR_INPADDR_ADDRESS:
4439 /* Can't use a register if it is used for an input address
4440 for this operand or used as an input in an earlier
4441 one. */
4442 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4443 return 0;
4444
4445 for (i = 0; i < opnum; i++)
4446 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4447 return 0;
4448
4449 return 1;
4450
4451 case RELOAD_FOR_OUTPUT_ADDRESS:
4452 /* Can't use a register if it is used for an output address for this
4453 operand or used as an output in this or a later operand. Note
4454 that multiple output operands are emitted in reverse order, so
4455 the conflicting ones are those with lower indices. */
4456 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4457 return 0;
4458
4459 for (i = 0; i <= opnum; i++)
4460 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4461 return 0;
4462
4463 return 1;
4464
4465 case RELOAD_FOR_OUTADDR_ADDRESS:
4466 /* Can't use a register if it is used for an output address
4467 for this operand or used as an output in this or a
4468 later operand. Note that multiple output operands are
4469 emitted in reverse order, so the conflicting ones are
4470 those with lower indices. */
4471 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4472 return 0;
4473
4474 for (i = 0; i <= opnum; i++)
4475 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4476 return 0;
4477
4478 return 1;
4479
4480 case RELOAD_FOR_OPERAND_ADDRESS:
4481 for (i = 0; i < reload_n_operands; i++)
4482 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4483 return 0;
4484
4485 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4486 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4487
4488 case RELOAD_FOR_OPADDR_ADDR:
4489 for (i = 0; i < reload_n_operands; i++)
4490 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4491 return 0;
4492
4493 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4494
4495 case RELOAD_FOR_OUTPUT:
4496 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4497 outputs, or an operand address for this or an earlier output.
4498 Note that multiple output operands are emitted in reverse order,
4499 so the conflicting ones are those with higher indices. */
4500 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4501 return 0;
4502
4503 for (i = 0; i < reload_n_operands; i++)
4504 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4505 return 0;
4506
4507 for (i = opnum; i < reload_n_operands; i++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4509 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4510 return 0;
4511
4512 return 1;
4513
4514 case RELOAD_FOR_INSN:
4515 for (i = 0; i < reload_n_operands; i++)
4516 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4517 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4518 return 0;
4519
4520 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4521 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4522
4523 case RELOAD_FOR_OTHER_ADDRESS:
4524 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4525 }
4526 abort ();
4527}
4528
4529/* Return 1 if the value in reload reg REGNO, as used by a reload
4530 needed for the part of the insn specified by OPNUM and TYPE,
4531 is still available in REGNO at the end of the insn.
4532
4533 We can assume that the reload reg was already tested for availability
4534 at the time it is needed, and we should not check this again,
4535 in case the reg has already been marked in use. */
4536
4537static int
4538reload_reg_reaches_end_p (regno, opnum, type)
4539 unsigned int regno;
4540 int opnum;
4541 enum reload_type type;
4542{
4543 int i;
4544
4545 switch (type)
4546 {
4547 case RELOAD_OTHER:
4548 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4549 its value must reach the end. */
4550 return 1;
4551
4552 /* If this use is for part of the insn,
4553 its value reaches the end if no subsequent part uses the same register.
4554 Just like the above function, don't try to do this with lots
4555 of fallthroughs. */
4556
4557 case RELOAD_FOR_OTHER_ADDRESS:
4558 /* Here we check for everything else, since these don't conflict
4559 with anything else and everything comes later. */
4560
4561 for (i = 0; i < reload_n_operands; i++)
4562 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4563 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4564 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4565 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4566 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4567 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4568 return 0;
4569
4570 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4571 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4572 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4573
4574 case RELOAD_FOR_INPUT_ADDRESS:
4575 case RELOAD_FOR_INPADDR_ADDRESS:
4576 /* Similar, except that we check only for this and subsequent inputs
4577 and the address of only subsequent inputs and we do not need
4578 to check for RELOAD_OTHER objects since they are known not to
4579 conflict. */
4580
4581 for (i = opnum; i < reload_n_operands; i++)
4582 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4583 return 0;
4584
4585 for (i = opnum + 1; i < reload_n_operands; i++)
4586 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4587 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4588 return 0;
4589
4590 for (i = 0; i < reload_n_operands; i++)
4591 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4592 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4593 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4594 return 0;
4595
4596 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4597 return 0;
4598
4599 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4600 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4601 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4602
4603 case RELOAD_FOR_INPUT:
4604 /* Similar to input address, except we start at the next operand for
4605 both input and input address and we do not check for
4606 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4607 would conflict. */
4608
4609 for (i = opnum + 1; i < reload_n_operands; i++)
4610 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4611 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4612 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4613 return 0;
4614
4615 /* ... fall through ... */
4616
4617 case RELOAD_FOR_OPERAND_ADDRESS:
4618 /* Check outputs and their addresses. */
4619
4620 for (i = 0; i < reload_n_operands; i++)
4621 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4622 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4623 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4624 return 0;
4625
4626 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4627
4628 case RELOAD_FOR_OPADDR_ADDR:
4629 for (i = 0; i < reload_n_operands; i++)
4630 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4631 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4632 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4633 return 0;
4634
4635 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4636 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4637 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4638
4639 case RELOAD_FOR_INSN:
4640 /* These conflict with other outputs with RELOAD_OTHER. So
4641 we need only check for output addresses. */
4642
4643 opnum = reload_n_operands;
4644
4645 /* ... fall through ... */
4646
4647 case RELOAD_FOR_OUTPUT:
4648 case RELOAD_FOR_OUTPUT_ADDRESS:
4649 case RELOAD_FOR_OUTADDR_ADDRESS:
4650 /* We already know these can't conflict with a later output. So the
4651 only thing to check are later output addresses.
4652 Note that multiple output operands are emitted in reverse order,
4653 so the conflicting ones are those with lower indices. */
4654 for (i = 0; i < opnum; i++)
4655 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4656 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4657 return 0;
4658
4659 return 1;
4660 }
4661
4662 abort ();
4663}
4664
4665/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4666 Return 0 otherwise.
4667
4668 This function uses the same algorithm as reload_reg_free_p above. */
4669
4670int
4671reloads_conflict (r1, r2)
4672 int r1, r2;
4673{
4674 enum reload_type r1_type = rld[r1].when_needed;
4675 enum reload_type r2_type = rld[r2].when_needed;
4676 int r1_opnum = rld[r1].opnum;
4677 int r2_opnum = rld[r2].opnum;
4678
4679 /* RELOAD_OTHER conflicts with everything. */
4680 if (r2_type == RELOAD_OTHER)
4681 return 1;
4682
4683 /* Otherwise, check conflicts differently for each type. */
4684
4685 switch (r1_type)
4686 {
4687 case RELOAD_FOR_INPUT:
4688 return (r2_type == RELOAD_FOR_INSN
4689 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4690 || r2_type == RELOAD_FOR_OPADDR_ADDR
4691 || r2_type == RELOAD_FOR_INPUT
4692 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4693 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4694 && r2_opnum > r1_opnum));
4695
4696 case RELOAD_FOR_INPUT_ADDRESS:
4697 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4698 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4699
4700 case RELOAD_FOR_INPADDR_ADDRESS:
4701 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4702 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4703
4704 case RELOAD_FOR_OUTPUT_ADDRESS:
4705 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4706 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4707
4708 case RELOAD_FOR_OUTADDR_ADDRESS:
4709 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4710 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4711
4712 case RELOAD_FOR_OPERAND_ADDRESS:
4713 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4714 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4715
4716 case RELOAD_FOR_OPADDR_ADDR:
4717 return (r2_type == RELOAD_FOR_INPUT
4718 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4719
4720 case RELOAD_FOR_OUTPUT:
4721 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4722 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4723 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4724 && r2_opnum >= r1_opnum));
4725
4726 case RELOAD_FOR_INSN:
4727 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4728 || r2_type == RELOAD_FOR_INSN
4729 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4730
4731 case RELOAD_FOR_OTHER_ADDRESS:
4732 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4733
4734 case RELOAD_OTHER:
4735 return 1;
4736
4737 default:
4738 abort ();
4739 }
4740}
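
/* Concrete reading of one of the cases above (operand numbers are only
   examples): a RELOAD_FOR_INPUT for operand 1 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS or RELOAD_FOR_INPADDR_ADDRESS for operand 2
   (r2_opnum > r1_opnum), but not with an input address reload for operand
   0, whose register is no longer needed by the time the operand-1 input
   is loaded.  */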
4741
4742/* Indexed by reload number, 1 if incoming value
4743 inherited from previous insns. */
4744char reload_inherited[MAX_RELOADS];
4745
4746/* For an inherited reload, this is the insn the reload was inherited from,
4747 if we know it. Otherwise, this is 0. */
4748rtx reload_inheritance_insn[MAX_RELOADS];
4749
4750/* If non-zero, this is a place to get the value of the reload,
4751 rather than using reload_in. */
4752rtx reload_override_in[MAX_RELOADS];
4753
4754/* For each reload, the hard register number of the register used,
4755 or -1 if we did not need a register for this reload. */
4756int reload_spill_index[MAX_RELOADS];
4757
4758/* Subroutine of free_for_value_p, used to check a single register.
4759 START_REGNO is the starting regno of the full reload register
4760 (possibly comprising multiple hard registers) that we are considering. */
4761
4762static int
4763reload_reg_free_for_value_p (start_regno, regno, opnum, type, value, out,
4764 reloadnum, ignore_address_reloads)
4765 int start_regno, regno;
4766 int opnum;
4767 enum reload_type type;
4768 rtx value, out;
4769 int reloadnum;
4770 int ignore_address_reloads;
4771{
4772 int time1;
4773 /* Set if we see an input reload that must not share its reload register
4774 with any new earlyclobber, but might otherwise share the reload
4775 register with an output or input-output reload. */
4776 int check_earlyclobber = 0;
4777 int i;
4778 int copy = 0;
4779
4780 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4781 return 0;
4782
4783 if (out == const0_rtx)
4784 {
4785 copy = 1;
4786 out = NULL_RTX;
4787 }
4788
4789 /* We use some pseudo 'time' value to check if the lifetimes of the
4790 new register use would overlap with the one of a previous reload
4791 that is not read-only or uses a different value.
4792 The 'time' used doesn't have to be linear in any shape or form, just
4793 monotonic.
4794 Some reload types use different 'buckets' for each operand.
4795 So there are MAX_RECOG_OPERANDS different time values for each
4796 such reload type.
4797 We compute TIME1 as the time when the register for the prospective
4798 new reload ceases to be live, and TIME2 for each existing
4799 reload as the time when the reload register of that reload
4800 becomes live.
4801 Where there is little to be gained by exact lifetime calculations,
4802 we just make conservative assumptions, i.e. a longer lifetime;
4803 this is done in the 'default:' cases. */
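  /* Worked numbers for the scheme described above (purely illustrative):
     for operand 1, RELOAD_FOR_INPADDR_ADDRESS gets time1 = 1*4 + 2 = 6,
     RELOAD_FOR_INPUT_ADDRESS gets 1*4 + 3 = 7, and a copy-only
     RELOAD_FOR_INPUT gets 1*4 + 4 = 8; operand 2's bucket starts at
     2*4 + 2 = 10, so the per-operand buckets never overlap.  A non-copy
     RELOAD_FOR_INPUT instead gets MAX_RECOG_OPERANDS*4 + 3, since it must
     stay live until the insn itself executes.  */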
4804 switch (type)
4805 {
4806 case RELOAD_FOR_OTHER_ADDRESS:
4807 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
4808 time1 = copy ? 0 : 1;
4809 break;
4810 case RELOAD_OTHER:
4811 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4812 break;
4813 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4814 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
4815 respectively, to the time values for these, we get distinct time
4816 values. To get distinct time values for each operand, we have to
4817 multiply opnum by at least three. We round that up to four because
4818 multiply by four is often cheaper. */
4819 case RELOAD_FOR_INPADDR_ADDRESS:
4820 time1 = opnum * 4 + 2;
4821 break;
4822 case RELOAD_FOR_INPUT_ADDRESS:
4823 time1 = opnum * 4 + 3;
4824 break;
4825 case RELOAD_FOR_INPUT:
4826 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4827 executes (inclusive). */
4828 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4829 break;
4830 case RELOAD_FOR_OPADDR_ADDR:
4831 /* opnum * 4 + 4
4832 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4833 time1 = MAX_RECOG_OPERANDS * 4 + 1;
4834 break;
4835 case RELOAD_FOR_OPERAND_ADDRESS:
4836 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4837 is executed. */
4838 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4839 break;
4840 case RELOAD_FOR_OUTADDR_ADDRESS:
4841 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4842 break;
4843 case RELOAD_FOR_OUTPUT_ADDRESS:
4844 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4845 break;
4846 default:
4847 time1 = MAX_RECOG_OPERANDS * 5 + 5;
4848 }
4849
4850 for (i = 0; i < n_reloads; i++)
4851 {
4852 rtx reg = rld[i].reg_rtx;
4853 if (reg && GET_CODE (reg) == REG
4854 && ((unsigned) regno - true_regnum (reg)
4855 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned) 1)
4856 && i != reloadnum)
4857 {
4858 rtx other_input = rld[i].in;
4859
4860 /* If the other reload loads the same input value, that
4861 will not cause a conflict only if it's loading it into
4862 the same register. */
4863 if (true_regnum (reg) != start_regno)
4864 other_input = NULL_RTX;
4865 if (! other_input || ! rtx_equal_p (other_input, value)
4866 || rld[i].out || out)
4867 {
4868 int time2;
4869 switch (rld[i].when_needed)
4870 {
4871 case RELOAD_FOR_OTHER_ADDRESS:
4872 time2 = 0;
4873 break;
4874 case RELOAD_FOR_INPADDR_ADDRESS:
4875 /* find_reloads makes sure that a
4876 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4877 by at most one - the first -
4878 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
4879 address reload is inherited, the address address reload
4880 goes away, so we can ignore this conflict. */
4881 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4882 && ignore_address_reloads
4883 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
4884 Then the address address is still needed to store
4885 back the new address. */
4886 && ! rld[reloadnum].out)
4887 continue;
4888 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
4889 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
4890 reloads go away. */
4891 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4892 && ignore_address_reloads
4893 /* Unless we are reloading an auto_inc expression. */
4894 && ! rld[reloadnum].out)
4895 continue;
4896 time2 = rld[i].opnum * 4 + 2;
4897 break;
4898 case RELOAD_FOR_INPUT_ADDRESS:
4899 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4900 && ignore_address_reloads
4901 && ! rld[reloadnum].out)
4902 continue;
4903 time2 = rld[i].opnum * 4 + 3;
4904 break;
4905 case RELOAD_FOR_INPUT:
4906 time2 = rld[i].opnum * 4 + 4;
4907 check_earlyclobber = 1;
4908 break;
4909 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
4910 == MAX_RECOG_OPERANDS * 4 */
4911 case RELOAD_FOR_OPADDR_ADDR:
4912 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
4913 && ignore_address_reloads
4914 && ! rld[reloadnum].out)
4915 continue;
4916 time2 = MAX_RECOG_OPERANDS * 4 + 1;
4917 break;
4918 case RELOAD_FOR_OPERAND_ADDRESS:
4919 time2 = MAX_RECOG_OPERANDS * 4 + 2;
4920 check_earlyclobber = 1;
4921 break;
4922 case RELOAD_FOR_INSN:
4923 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4924 break;
4925 case RELOAD_FOR_OUTPUT:
4926 /* All RELOAD_FOR_OUTPUT reloads become live just after the
4927 instruction is executed. */
4928 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4929 break;
4930 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
4931 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
4932 value. */
4933 case RELOAD_FOR_OUTADDR_ADDRESS:
4934 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
4935 && ignore_address_reloads
4936 && ! rld[reloadnum].out)
4937 continue;
4938 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
4939 break;
4940 case RELOAD_FOR_OUTPUT_ADDRESS:
4941 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
4942 break;
4943 case RELOAD_OTHER:
4944 /* If there is no conflict in the input part, handle this
4945 like an output reload. */
4946 if (! rld[i].in || rtx_equal_p (other_input, value))
4947 {
4948 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4949 /* Earlyclobbered outputs must conflict with inputs. */
4950 if (earlyclobber_operand_p (rld[i].out))
4951 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4952
4953 break;
4954 }
4955 time2 = 1;
4956 /* RELOAD_OTHER might be live beyond instruction execution,
4957 but this is not obvious when we set time2 = 1. So check
4958 here if there might be a problem with the new reload
4959 clobbering the register used by the RELOAD_OTHER. */
4960 if (out)
4961 return 0;
4962 break;
4963 default:
4964 return 0;
4965 }
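		  /* A rough reading of the test below: the new reload
		     conflicts with reload I when its value is still needed
		     at or after the time reload I starts using the register
		     (time1 >= time2), unless reload I is a pure input that
		     loads the very same value; in addition, when OUT is set
		     and the new reload has an output register, it conflicts
		     with any reload I that is still live during the output
		     phase (time2 >= MAX_RECOG_OPERANDS * 4 + 3).  */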
4966 if ((time1 >= time2
4967 && (! rld[i].in || rld[i].out
4968 || ! rtx_equal_p (other_input, value)))
4969 || (out && rld[reloadnum].out_reg
4970 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
4971 return 0;
4972 }
4973 }
4974 }
4975
4976 /* Earlyclobbered outputs must conflict with inputs. */
4977 if (check_earlyclobber && out && earlyclobber_operand_p (out))
4978 return 0;
4979
4980 return 1;
4981}
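
/* A reader's sketch of the time scale used for conflict checking in
   reload_reg_free_for_value_p above (derived from the switch in that
   function): RELOAD_FOR_OTHER_ADDRESS reloads act at time 0; the address
   and input reloads of operand N act at roughly N * 4 + 2, N * 4 + 3 and
   N * 4 + 4; RELOAD_FOR_OPADDR_ADDR, RELOAD_FOR_OPERAND_ADDRESS and
   RELOAD_FOR_INSN act at MAX_RECOG_OPERANDS * 4 + 1 through + 3; output
   reloads and their address reloads act at MAX_RECOG_OPERANDS * 4 + 4 and
   later.  */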
4982
4983/* Return 1 if the value in reload reg REGNO, as used by a reload
4984 needed for the part of the insn specified by OPNUM and TYPE,
4985 may be used to load VALUE into it.
4986
4987 MODE is the mode in which the register is used, this is needed to
4988 determine how many hard regs to test.
4989
4990 Other read-only reloads with the same value do not conflict
4991 unless OUT is non-zero and these other reloads have to live while
4992 output reloads live.
4993 If OUT is CONST0_RTX, this is a special case: it means that the
4994 test should not be for using register REGNO as reload register, but
4995 for copying from register REGNO into the reload register.
4996
4997 RELOADNUM is the number of the reload we want to load this value for;
4998 a reload does not conflict with itself.
4999
5000   When IGNORE_ADDRESS_RELOADS is set, we do not count conflicts with
5001   reloads that load an address for the very reload we are considering.
5002
5003 The caller has to make sure that there is no conflict with the return
5004 register. */
5005
5006static int
5007free_for_value_p (regno, mode, opnum, type, value, out, reloadnum,
5008 ignore_address_reloads)
5009 int regno;
5010 enum machine_mode mode;
5011 int opnum;
5012 enum reload_type type;
5013 rtx value, out;
5014 int reloadnum;
5015 int ignore_address_reloads;
5016{
5017 int nregs = HARD_REGNO_NREGS (regno, mode);
5018 while (nregs-- > 0)
5019 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5020 value, out, reloadnum,
5021 ignore_address_reloads))
5022 return 0;
5023 return 1;
5024}
5025
5026/* Determine whether the reload reg X overlaps any rtx'es used for
5027 overriding inheritance. Return nonzero if so. */
5028
5029static int
5030conflicts_with_override (x)
5031 rtx x;
5032{
5033 int i;
5034 for (i = 0; i < n_reloads; i++)
5035 if (reload_override_in[i]
5036 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5037 return 1;
5038 return 0;
5039}
5040
5041/* Give an error message saying we failed to find a reload for INSN,
5042 and clear out reload R. */
5043static void
5044failed_reload (insn, r)
5045 rtx insn;
5046 int r;
5047{
5048 if (asm_noperands (PATTERN (insn)) < 0)
5049 /* It's the compiler's fault. */
5050 fatal_insn ("could not find a spill register", insn);
5051
5052 /* It's the user's fault; the operand's mode and constraint
5053 don't match. Disable this reload so we don't crash in final. */
5054 error_for_asm (insn,
5055 "`asm' operand constraint incompatible with operand size");
5056 rld[r].in = 0;
5057 rld[r].out = 0;
5058 rld[r].reg_rtx = 0;
5059 rld[r].optional = 1;
5060 rld[r].secondary_p = 1;
5061}
5062
5063/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5064 for reload R. If it's valid, get an rtx for it. Return nonzero if
5065 successful. */
5066static int
5067set_reload_reg (i, r)
5068 int i, r;
5069{
5070 int regno;
5071 rtx reg = spill_reg_rtx[i];
5072
5073 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5074 spill_reg_rtx[i] = reg
5075 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5076
5077 regno = true_regnum (reg);
5078
5079   /* Detect when the reload reg can't hold the reload mode.
5080      This used to be one `if', but the Sequent compiler can't handle that.  */
5081 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5082 {
5083 enum machine_mode test_mode = VOIDmode;
5084 if (rld[r].in)
5085 test_mode = GET_MODE (rld[r].in);
5086 /* If rld[r].in has VOIDmode, it means we will load it
5087 in whatever mode the reload reg has: to wit, rld[r].mode.
5088 We have already tested that for validity. */
5089 /* Aside from that, we need to test that the expressions
5090 to reload from or into have modes which are valid for this
5091 reload register. Otherwise the reload insns would be invalid. */
5092 if (! (rld[r].in != 0 && test_mode != VOIDmode
5093 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5094 if (! (rld[r].out != 0
5095 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5096 {
5097 /* The reg is OK. */
5098 last_spill_reg = i;
5099
5100 /* Mark as in use for this insn the reload regs we use
5101 for this. */
5102 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5103 rld[r].when_needed, rld[r].mode);
5104
5105 rld[r].reg_rtx = reg;
5106 reload_spill_index[r] = spill_regs[i];
5107 return 1;
5108 }
5109 }
5110 return 0;
5111}
5112
5113/* Find a spill register to use as a reload register for reload R.
5114 LAST_RELOAD is non-zero if this is the last reload for the insn being
5115 processed.
5116
5117 Set rld[R].reg_rtx to the register allocated.
5118
5119 We return 1 if successful, or 0 if we couldn't find a spill reg and
5120 we didn't change anything. */
5121
5122static int
5123allocate_reload_reg (chain, r, last_reload)
5124 struct insn_chain *chain ATTRIBUTE_UNUSED;
5125 int r;
5126 int last_reload;
5127{
5128 int i, pass, count;
5129
5130 /* If we put this reload ahead, thinking it is a group,
5131 then insist on finding a group. Otherwise we can grab a
5132 reg that some other reload needs.
5133 (That can happen when we have a 68000 DATA_OR_FP_REG
5134 which is a group of data regs or one fp reg.)
5135 We need not be so restrictive if there are no more reloads
5136 for this insn.
5137
5138 ??? Really it would be nicer to have smarter handling
5139 for that kind of reg class, where a problem like this is normal.
5140 Perhaps those classes should be avoided for reloading
5141 by use of more alternatives. */
5142
5143 int force_group = rld[r].nregs > 1 && ! last_reload;
5144
5145 /* If we want a single register and haven't yet found one,
5146 take any reg in the right class and not in use.
5147 If we want a consecutive group, here is where we look for it.
5148
5149 We use two passes so we can first look for reload regs to
5150 reuse, which are already in use for other reloads in this insn,
5151 and only then use additional registers.
5152 I think that maximizing reuse is needed to make sure we don't
5153 run out of reload regs. Suppose we have three reloads, and
5154 reloads A and B can share regs. These need two regs.
5155 Suppose A and B are given different regs.
5156 That leaves none for C. */
5157 for (pass = 0; pass < 2; pass++)
5158 {
5159 /* I is the index in spill_regs.
5160 We advance it round-robin between insns to use all spill regs
5161 equally, so that inherited reloads have a chance
5162 of leapfrogging each other. */
5163
5164 i = last_spill_reg;
5165
5166 for (count = 0; count < n_spills; count++)
5167 {
5168 int class = (int) rld[r].class;
5169 int regnum;
5170
5171 i++;
5172 if (i >= n_spills)
5173 i -= n_spills;
5174 regnum = spill_regs[i];
5175
5176 if ((reload_reg_free_p (regnum, rld[r].opnum,
5177 rld[r].when_needed)
5178 || (rld[r].in
5179 /* We check reload_reg_used to make sure we
5180 don't clobber the return register. */
5181 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5182 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5183 rld[r].when_needed, rld[r].in,
5184 rld[r].out, r, 1)))
5185 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5186 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5187 /* Look first for regs to share, then for unshared. But
5188 don't share regs used for inherited reloads; they are
5189 the ones we want to preserve. */
5190 && (pass
5191 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5192 regnum)
5193 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5194 regnum))))
5195 {
5196 int nr = HARD_REGNO_NREGS (regnum, rld[r].mode);
5197 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5198 (on 68000) got us two FP regs. If NR is 1,
5199 we would reject both of them. */
5200 if (force_group)
5201 nr = rld[r].nregs;
5202 /* If we need only one reg, we have already won. */
5203 if (nr == 1)
5204 {
5205 /* But reject a single reg if we demand a group. */
5206 if (force_group)
5207 continue;
5208 break;
5209 }
5210 /* Otherwise check that as many consecutive regs as we need
5211 are available here. */
5212 while (nr > 1)
5213 {
5214 int regno = regnum + nr - 1;
5215 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5216 && spill_reg_order[regno] >= 0
5217 && reload_reg_free_p (regno, rld[r].opnum,
5218 rld[r].when_needed)))
5219 break;
5220 nr--;
5221 }
5222 if (nr == 1)
5223 break;
5224 }
5225 }
5226
5227 /* If we found something on pass 1, omit pass 2. */
5228 if (count < n_spills)
5229 break;
5230 }
5231
5232 /* We should have found a spill register by now. */
5233 if (count >= n_spills)
5234 return 0;
5235
5236 /* I is the index in SPILL_REG_RTX of the reload register we are to
5237 allocate. Get an rtx for it and find its register number. */
5238
5239 return set_reload_reg (i, r);
5240}
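
/* An illustrative example of the group search above: if a reload needs two
   consecutive hard registers (say a double-word value on a 32-bit target),
   a candidate REGNUM found by the round-robin scan is kept only when
   REGNUM + 1 is also in the reload's class, is itself a spill register,
   and is free for this operand and reload type; otherwise the scan simply
   moves on to the next spill register.  */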
5241
5242/* Initialize all the tables needed to allocate reload registers.
5243 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5244 is the array we use to restore the reg_rtx field for every reload. */
5245
5246static void
5247choose_reload_regs_init (chain, save_reload_reg_rtx)
5248 struct insn_chain *chain;
5249 rtx *save_reload_reg_rtx;
5250{
5251 int i;
5252
5253 for (i = 0; i < n_reloads; i++)
5254 rld[i].reg_rtx = save_reload_reg_rtx[i];
5255
5256 memset (reload_inherited, 0, MAX_RELOADS);
5257 memset ((char *) reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5258 memset ((char *) reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5259
5260 CLEAR_HARD_REG_SET (reload_reg_used);
5261 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5262 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5263 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5264 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5265 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5266
5267 CLEAR_HARD_REG_SET (reg_used_in_insn);
5268 {
5269 HARD_REG_SET tmp;
5270 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5271 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5272 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5273 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5274 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5275 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5276 }
5277
5278 for (i = 0; i < reload_n_operands; i++)
5279 {
5280 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5281 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5282 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5283 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5284 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5285 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5286 }
5287
5288 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5289
5290 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5291
5292 for (i = 0; i < n_reloads; i++)
5293 /* If we have already decided to use a certain register,
5294 don't use it in another way. */
5295 if (rld[i].reg_rtx)
5296 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5297 rld[i].when_needed, rld[i].mode);
5298}
5299
5300/* Assign hard reg targets for the pseudo-registers we must reload
5301 into hard regs for this insn.
5302 Also output the instructions to copy them in and out of the hard regs.
5303
5304 For machines with register classes, we are responsible for
5305 finding a reload reg in the proper class. */
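
/* In outline, choose_reload_regs below: (1) records the initial order of
   the reloads and whether any of them wants a group of registers;
   (2) sorts the reloads by register class; (3) tries to assign registers,
   first with inheritance from previous insns (when optimizing) and then
   once more without it; (4) re-checks every inherited or overridden reload
   once all assignments are known; (5) applies reload_override_in;
   (6) clears the registers of cancelled and unused optional reloads; and
   (7) records which pseudos and spill registers have output reloads.
   This is only a reader's road map; the comments inside the function are
   authoritative.  */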
5306
5307static void
5308choose_reload_regs (chain)
5309 struct insn_chain *chain;
5310{
5311 rtx insn = chain->insn;
5312 int i, j;
5313 unsigned int max_group_size = 1;
5314 enum reg_class group_class = NO_REGS;
5315 int pass, win, inheritance;
5316
5317 rtx save_reload_reg_rtx[MAX_RELOADS];
5318
5319 /* In order to be certain of getting the registers we need,
5320 we must sort the reloads into order of increasing register class.
5321 Then our grabbing of reload registers will parallel the process
5322 that provided the reload registers.
5323
5324 Also note whether any of the reloads wants a consecutive group of regs.
5325 If so, record the maximum size of the group desired and what
5326 register class contains all the groups needed by this insn. */
5327
5328 for (j = 0; j < n_reloads; j++)
5329 {
5330 reload_order[j] = j;
5331 reload_spill_index[j] = -1;
5332
5333 if (rld[j].nregs > 1)
5334 {
5335 max_group_size = MAX (rld[j].nregs, max_group_size);
5336 group_class
5337 = reg_class_superunion[(int) rld[j].class][(int) group_class];
5338 }
5339
5340 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5341 }
5342
5343 if (n_reloads > 1)
5344 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5345
5346   /* If -O, try first with inheritance, then with inheritance turned off.
5347      If not -O, don't do inheritance.
5348 Using inheritance when not optimizing leads to paradoxes
5349 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5350 because one side of the comparison might be inherited. */
5351 win = 0;
5352 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5353 {
5354 choose_reload_regs_init (chain, save_reload_reg_rtx);
5355
5356 /* Process the reloads in order of preference just found.
5357 Beyond this point, subregs can be found in reload_reg_rtx.
5358
5359 This used to look for an existing reloaded home for all of the
5360 reloads, and only then perform any new reloads. But that could lose
5361 if the reloads were done out of reg-class order because a later
5362 reload with a looser constraint might have an old home in a register
5363 needed by an earlier reload with a tighter constraint.
5364
5365 To solve this, we make two passes over the reloads, in the order
5366 described above. In the first pass we try to inherit a reload
5367 from a previous insn. If there is a later reload that needs a
5368 class that is a proper subset of the class being processed, we must
5369 also allocate a spill register during the first pass.
5370
5371 Then make a second pass over the reloads to allocate any reloads
5372 that haven't been given registers yet. */
5373
5374 for (j = 0; j < n_reloads; j++)
5375 {
5376 int r = reload_order[j];
5377 rtx search_equiv = NULL_RTX;
5378
5379 /* Ignore reloads that got marked inoperative. */
5380 if (rld[r].out == 0 && rld[r].in == 0
5381 && ! rld[r].secondary_p)
5382 continue;
5383
5384 /* If find_reloads chose to use reload_in or reload_out as a reload
5385	     register, we don't need to choose one.  Otherwise, try even if it
5386 found one since we might save an insn if we find the value lying
5387 around.
5388 Try also when reload_in is a pseudo without a hard reg. */
5389 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5390 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5391 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5392 && GET_CODE (rld[r].in) != MEM
5393 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5394 continue;
5395
5396#if 0 /* No longer needed for correct operation.
5397 It might give better code, or might not; worth an experiment? */
5398 /* If this is an optional reload, we can't inherit from earlier insns
5399 until we are sure that any non-optional reloads have been allocated.
5400 The following code takes advantage of the fact that optional reloads
5401 are at the end of reload_order. */
5402 if (rld[r].optional != 0)
5403 for (i = 0; i < j; i++)
5404 if ((rld[reload_order[i]].out != 0
5405 || rld[reload_order[i]].in != 0
5406 || rld[reload_order[i]].secondary_p)
5407 && ! rld[reload_order[i]].optional
5408 && rld[reload_order[i]].reg_rtx == 0)
5409 allocate_reload_reg (chain, reload_order[i], 0);
5410#endif
5411
5412 /* First see if this pseudo is already available as reloaded
5413	     for a previous insn.  We cannot try to inherit for reloads
5414	     that need fewer registers than the maximum group size unless
5415	     the register we would allocate cannot be used
5416	     for the groups.
5417
5418 We could check here to see if this is a secondary reload for
5419 an object that is already in a register of the desired class.
5420 This would avoid the need for the secondary reload register.
5421 But this is complex because we can't easily determine what
5422 objects might want to be loaded via this reload. So let a
5423 register be allocated here. In `emit_reload_insns' we suppress
5424 one of the loads in the case described above. */
5425
5426 if (inheritance)
5427 {
5428 int byte = 0;
5429 int regno = -1;
5430 enum machine_mode mode = VOIDmode;
5431
5432 if (rld[r].in == 0)
5433 ;
5434 else if (GET_CODE (rld[r].in) == REG)
5435 {
5436 regno = REGNO (rld[r].in);
5437 mode = GET_MODE (rld[r].in);
5438 }
5439 else if (GET_CODE (rld[r].in_reg) == REG)
5440 {
5441 regno = REGNO (rld[r].in_reg);
5442 mode = GET_MODE (rld[r].in_reg);
5443 }
5444 else if (GET_CODE (rld[r].in_reg) == SUBREG
5445 && GET_CODE (SUBREG_REG (rld[r].in_reg)) == REG)
5446 {
5447 byte = SUBREG_BYTE (rld[r].in_reg);
5448 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5449 if (regno < FIRST_PSEUDO_REGISTER)
5450 regno = subreg_regno (rld[r].in_reg);
5451 mode = GET_MODE (rld[r].in_reg);
5452 }
5453#ifdef AUTO_INC_DEC
5454 else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5455 || GET_CODE (rld[r].in_reg) == PRE_DEC
5456 || GET_CODE (rld[r].in_reg) == POST_INC
5457 || GET_CODE (rld[r].in_reg) == POST_DEC)
5458 && GET_CODE (XEXP (rld[r].in_reg, 0)) == REG)
5459 {
5460 regno = REGNO (XEXP (rld[r].in_reg, 0));
5461 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5462 rld[r].out = rld[r].in;
5463 }
5464#endif
5465#if 0
5466 /* This won't work, since REGNO can be a pseudo reg number.
5467 Also, it takes much more hair to keep track of all the things
5468 that can invalidate an inherited reload of part of a pseudoreg. */
5469 else if (GET_CODE (rld[r].in) == SUBREG
5470 && GET_CODE (SUBREG_REG (rld[r].in)) == REG)
5471 regno = subreg_regno (rld[r].in);
5472#endif
5473
5474 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5475 {
5476 enum reg_class class = rld[r].class, last_class;
5477 rtx last_reg = reg_last_reload_reg[regno];
5478 enum machine_mode need_mode;
5479
5480 i = REGNO (last_reg);
5481 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5482 last_class = REGNO_REG_CLASS (i);
5483
5484 if (byte == 0)
5485 need_mode = mode;
5486 else
5487 need_mode
5488 = smallest_mode_for_size (GET_MODE_SIZE (mode) + byte,
5489 GET_MODE_CLASS (mode));
5490
5491 if (
5492#ifdef CLASS_CANNOT_CHANGE_MODE
5493 (TEST_HARD_REG_BIT
5494 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE], i)
5495 ? ! CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (last_reg),
5496 need_mode)
5497 : (GET_MODE_SIZE (GET_MODE (last_reg))
5498 >= GET_MODE_SIZE (need_mode)))
5499#else
5500 (GET_MODE_SIZE (GET_MODE (last_reg))
5501 >= GET_MODE_SIZE (need_mode))
5502#endif
5503 && reg_reloaded_contents[i] == regno
5504 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5505 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5506 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5507 /* Even if we can't use this register as a reload
5508 register, we might use it for reload_override_in,
5509 if copying it to the desired class is cheap
5510 enough. */
5511 || ((REGISTER_MOVE_COST (mode, last_class, class)
5512 < MEMORY_MOVE_COST (mode, class, 1))
5513#ifdef SECONDARY_INPUT_RELOAD_CLASS
5514 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5515 last_reg)
5516 == NO_REGS)
5517#endif
5518#ifdef SECONDARY_MEMORY_NEEDED
5519 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5520 mode)
5521#endif
5522 ))
5523
5524 && (rld[r].nregs == max_group_size
5525 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5526 i))
5527 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5528 rld[r].when_needed, rld[r].in,
5529 const0_rtx, r, 1))
5530 {
5531 /* If a group is needed, verify that all the subsequent
5532 registers still have their values intact. */
5533 int nr = HARD_REGNO_NREGS (i, rld[r].mode);
5534 int k;
5535
5536 for (k = 1; k < nr; k++)
5537 if (reg_reloaded_contents[i + k] != regno
5538 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5539 break;
5540
5541 if (k == nr)
5542 {
5543 int i1;
5544 int bad_for_class;
5545
5546 last_reg = (GET_MODE (last_reg) == mode
5547 ? last_reg : gen_rtx_REG (mode, i));
5548
5549 bad_for_class = 0;
5550 for (k = 0; k < nr; k++)
5551 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5552 i+k);
5553
5554 /* We found a register that contains the
5555 value we need. If this register is the
5556 same as an `earlyclobber' operand of the
5557 current insn, just mark it as a place to
5558 reload from since we can't use it as the
5559 reload register itself. */
5560
5561 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5562 if (reg_overlap_mentioned_for_reload_p
5563 (reg_last_reload_reg[regno],
5564 reload_earlyclobbers[i1]))
5565 break;
5566
5567 if (i1 != n_earlyclobbers
5568 || ! (free_for_value_p (i, rld[r].mode,
5569 rld[r].opnum,
5570 rld[r].when_needed, rld[r].in,
5571 rld[r].out, r, 1))
5572 /* Don't use it if we'd clobber a pseudo reg. */
5573 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5574 && rld[r].out
5575 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5576 /* Don't clobber the frame pointer. */
5577 || (i == HARD_FRAME_POINTER_REGNUM
5578 && frame_pointer_needed
5579 && rld[r].out)
5580 /* Don't really use the inherited spill reg
5581 if we need it wider than we've got it. */
5582 || (GET_MODE_SIZE (rld[r].mode)
5583 > GET_MODE_SIZE (mode))
5584 || bad_for_class
5585
5586 /* If find_reloads chose reload_out as reload
5587 register, stay with it - that leaves the
5588 inherited register for subsequent reloads. */
5589 || (rld[r].out && rld[r].reg_rtx
5590 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5591 {
5592 if (! rld[r].optional)
5593 {
5594 reload_override_in[r] = last_reg;
5595 reload_inheritance_insn[r]
5596 = reg_reloaded_insn[i];
5597 }
5598 }
5599 else
5600 {
5601 int k;
5602 /* We can use this as a reload reg. */
5603 /* Mark the register as in use for this part of
5604 the insn. */
5605 mark_reload_reg_in_use (i,
5606 rld[r].opnum,
5607 rld[r].when_needed,
5608 rld[r].mode);
5609 rld[r].reg_rtx = last_reg;
5610 reload_inherited[r] = 1;
5611 reload_inheritance_insn[r]
5612 = reg_reloaded_insn[i];
5613 reload_spill_index[r] = i;
5614 for (k = 0; k < nr; k++)
5615 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5616 i + k);
5617 }
5618 }
5619 }
5620 }
5621 }
5622
5623 /* Here's another way to see if the value is already lying around. */
5624 if (inheritance
5625 && rld[r].in != 0
5626 && ! reload_inherited[r]
5627 && rld[r].out == 0
5628 && (CONSTANT_P (rld[r].in)
5629 || GET_CODE (rld[r].in) == PLUS
5630 || GET_CODE (rld[r].in) == REG
5631 || GET_CODE (rld[r].in) == MEM)
5632 && (rld[r].nregs == max_group_size
5633 || ! reg_classes_intersect_p (rld[r].class, group_class)))
5634 search_equiv = rld[r].in;
5635	  /* If this is an output reload from a simple move insn, see
5636	     whether an equivalence for the input is available.  */
5637 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5638 {
5639 rtx set = single_set (insn);
5640
5641 if (set
5642 && rtx_equal_p (rld[r].out, SET_DEST (set))
5643 && CONSTANT_P (SET_SRC (set)))
5644 search_equiv = SET_SRC (set);
5645 }
5646
5647 if (search_equiv)
5648 {
5649 rtx equiv
5650 = find_equiv_reg (search_equiv, insn, rld[r].class,
5651 -1, NULL, 0, rld[r].mode);
5652 int regno = 0;
5653
5654 if (equiv != 0)
5655 {
5656 if (GET_CODE (equiv) == REG)
5657 regno = REGNO (equiv);
5658 else if (GET_CODE (equiv) == SUBREG)
5659 {
5660 /* This must be a SUBREG of a hard register.
5661 Make a new REG since this might be used in an
5662 address and not all machines support SUBREGs
5663 there. */
5664 regno = subreg_regno (equiv);
5665 equiv = gen_rtx_REG (rld[r].mode, regno);
5666 }
5667 else
5668 abort ();
5669 }
5670
5671 /* If we found a spill reg, reject it unless it is free
5672 and of the desired class. */
5673 if (equiv != 0
5674 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
5675 && ! free_for_value_p (regno, rld[r].mode,
5676 rld[r].opnum, rld[r].when_needed,
5677 rld[r].in, rld[r].out, r, 1))
5678 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5679 regno)))
5680 equiv = 0;
5681
5682 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5683 equiv = 0;
5684
5685 /* We found a register that contains the value we need.
5686 If this register is the same as an `earlyclobber' operand
5687 of the current insn, just mark it as a place to reload from
5688 since we can't use it as the reload register itself. */
5689
5690 if (equiv != 0)
5691 for (i = 0; i < n_earlyclobbers; i++)
5692 if (reg_overlap_mentioned_for_reload_p (equiv,
5693 reload_earlyclobbers[i]))
5694 {
5695 if (! rld[r].optional)
5696 reload_override_in[r] = equiv;
5697 equiv = 0;
5698 break;
5699 }
5700
5701 /* If the equiv register we have found is explicitly clobbered
5702 in the current insn, it depends on the reload type if we
5703 can use it, use it for reload_override_in, or not at all.
5704 In particular, we then can't use EQUIV for a
5705 RELOAD_FOR_OUTPUT_ADDRESS reload. */
5706
5707 if (equiv != 0)
5708 {
5709 if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
5710 switch (rld[r].when_needed)
5711 {
5712 case RELOAD_FOR_OTHER_ADDRESS:
5713 case RELOAD_FOR_INPADDR_ADDRESS:
5714 case RELOAD_FOR_INPUT_ADDRESS:
5715 case RELOAD_FOR_OPADDR_ADDR:
5716 break;
5717 case RELOAD_OTHER:
5718 case RELOAD_FOR_INPUT:
5719 case RELOAD_FOR_OPERAND_ADDRESS:
5720 if (! rld[r].optional)
5721 reload_override_in[r] = equiv;
5722 /* Fall through. */
5723 default:
5724 equiv = 0;
5725 break;
5726 }
5727 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5728 switch (rld[r].when_needed)
5729 {
5730 case RELOAD_FOR_OTHER_ADDRESS:
5731 case RELOAD_FOR_INPADDR_ADDRESS:
5732 case RELOAD_FOR_INPUT_ADDRESS:
5733 case RELOAD_FOR_OPADDR_ADDR:
5734 case RELOAD_FOR_OPERAND_ADDRESS:
5735 case RELOAD_FOR_INPUT:
5736 break;
5737 case RELOAD_OTHER:
5738 if (! rld[r].optional)
5739 reload_override_in[r] = equiv;
5740 /* Fall through. */
5741 default:
5742 equiv = 0;
5743 break;
5744 }
5745 }
5746
5747 /* If we found an equivalent reg, say no code need be generated
5748 to load it, and use it as our reload reg. */
5749 if (equiv != 0
5750 && (regno != HARD_FRAME_POINTER_REGNUM
5751 || !frame_pointer_needed))
5752 {
5753 int nr = HARD_REGNO_NREGS (regno, rld[r].mode);
5754 int k;
5755 rld[r].reg_rtx = equiv;
5756 reload_inherited[r] = 1;
5757
5758 /* If reg_reloaded_valid is not set for this register,
5759 there might be a stale spill_reg_store lying around.
5760 We must clear it, since otherwise emit_reload_insns
5761 might delete the store. */
5762 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5763 spill_reg_store[regno] = NULL_RTX;
5764 /* If any of the hard registers in EQUIV are spill
5765 registers, mark them as in use for this insn. */
5766 for (k = 0; k < nr; k++)
5767 {
5768 i = spill_reg_order[regno + k];
5769 if (i >= 0)
5770 {
5771 mark_reload_reg_in_use (regno, rld[r].opnum,
5772 rld[r].when_needed,
5773 rld[r].mode);
5774 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5775 regno + k);
5776 }
5777 }
5778 }
5779 }
5780
5781 /* If we found a register to use already, or if this is an optional
5782 reload, we are done. */
5783 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5784 continue;
5785
5786#if 0
5787 /* No longer needed for correct operation. Might or might
5788 not give better code on the average. Want to experiment? */
5789
5790 /* See if there is a later reload that has a class different from our
5791 class that intersects our class or that requires less register
5792 than our reload. If so, we must allocate a register to this
5793 reload now, since that reload might inherit a previous reload
5794 and take the only available register in our class. Don't do this
5795 for optional reloads since they will force all previous reloads
5796 to be allocated. Also don't do this for reloads that have been
5797 turned off. */
5798
5799 for (i = j + 1; i < n_reloads; i++)
5800 {
5801 int s = reload_order[i];
5802
5803 if ((rld[s].in == 0 && rld[s].out == 0
5804 && ! rld[s].secondary_p)
5805 || rld[s].optional)
5806 continue;
5807
5808 if ((rld[s].class != rld[r].class
5809 && reg_classes_intersect_p (rld[r].class,
5810 rld[s].class))
5811 || rld[s].nregs < rld[r].nregs)
5812 break;
5813 }
5814
5815 if (i == n_reloads)
5816 continue;
5817
5818 allocate_reload_reg (chain, r, j == n_reloads - 1);
5819#endif
5820 }
5821
5822 /* Now allocate reload registers for anything non-optional that
5823 didn't get one yet. */
5824 for (j = 0; j < n_reloads; j++)
5825 {
5826 int r = reload_order[j];
5827
5828 /* Ignore reloads that got marked inoperative. */
5829 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5830 continue;
5831
5832 /* Skip reloads that already have a register allocated or are
5833 optional. */
5834 if (rld[r].reg_rtx != 0 || rld[r].optional)
5835 continue;
5836
5837 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5838 break;
5839 }
5840
5841 /* If that loop got all the way, we have won. */
5842 if (j == n_reloads)
5843 {
5844 win = 1;
5845 break;
5846 }
5847
5848 /* Loop around and try without any inheritance. */
5849 }
5850
5851 if (! win)
5852 {
5853 /* First undo everything done by the failed attempt
5854 to allocate with inheritance. */
5855 choose_reload_regs_init (chain, save_reload_reg_rtx);
5856
5857 /* Some sanity tests to verify that the reloads found in the first
5858 pass are identical to the ones we have now. */
5859 if (chain->n_reloads != n_reloads)
5860 abort ();
5861
5862 for (i = 0; i < n_reloads; i++)
5863 {
5864 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
5865 continue;
5866 if (chain->rld[i].when_needed != rld[i].when_needed)
5867 abort ();
5868 for (j = 0; j < n_spills; j++)
5869 if (spill_regs[j] == chain->rld[i].regno)
5870 if (! set_reload_reg (j, i))
5871 failed_reload (chain->insn, i);
5872 }
5873 }
5874
5875 /* If we thought we could inherit a reload, because it seemed that
5876 nothing else wanted the same reload register earlier in the insn,
5877 verify that assumption, now that all reloads have been assigned.
5878 Likewise for reloads where reload_override_in has been set. */
5879
5880 /* If doing expensive optimizations, do one preliminary pass that doesn't
5881 cancel any inheritance, but removes reloads that have been needed only
5882 for reloads that we know can be inherited. */
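  /* Concretely: the preliminary pass (pass == 1, run only with
     -fexpensive-optimizations) only removes address reloads made
     unnecessary by an inheritance we expect to keep, while the final pass
     (pass == 0) also cancels inheritances and overrides that turn out to
     conflict.  Setting PASS to 2 below causes the preliminary pass to be
     repeated after a successful removal.  */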
5883 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
5884 {
5885 for (j = 0; j < n_reloads; j++)
5886 {
5887 int r = reload_order[j];
5888 rtx check_reg;
5889 if (reload_inherited[r] && rld[r].reg_rtx)
5890 check_reg = rld[r].reg_rtx;
5891 else if (reload_override_in[r]
5892 && (GET_CODE (reload_override_in[r]) == REG
5893 || GET_CODE (reload_override_in[r]) == SUBREG))
5894 check_reg = reload_override_in[r];
5895 else
5896 continue;
5897 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
5898 rld[r].opnum, rld[r].when_needed, rld[r].in,
5899 (reload_inherited[r]
5900 ? rld[r].out : const0_rtx),
5901 r, 1))
5902 {
5903 if (pass)
5904 continue;
5905 reload_inherited[r] = 0;
5906 reload_override_in[r] = 0;
5907 }
5908 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
5909 reload_override_in, then we do not need its related
5910 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
5911 likewise for other reload types.
5912 We handle this by removing a reload when its only replacement
5913 is mentioned in reload_in of the reload we are going to inherit.
5914 A special case are auto_inc expressions; even if the input is
5915 inherited, we still need the address for the output. We can
5916 recognize them because they have RELOAD_OUT set to RELOAD_IN.
5917 If we succeeded removing some reload and we are doing a preliminary
5918 pass just to remove such reloads, make another pass, since the
5919 removal of one reload might allow us to inherit another one. */
5920 else if (rld[r].in
5921 && rld[r].out != rld[r].in
5922 && remove_address_replacements (rld[r].in) && pass)
5923 pass = 2;
5924 }
5925 }
5926
5927 /* Now that reload_override_in is known valid,
5928 actually override reload_in. */
5929 for (j = 0; j < n_reloads; j++)
5930 if (reload_override_in[j])
5931 rld[j].in = reload_override_in[j];
5932
5933 /* If this reload won't be done because it has been cancelled or is
5934 optional and not inherited, clear reload_reg_rtx so other
5935 routines (such as subst_reloads) don't get confused. */
5936 for (j = 0; j < n_reloads; j++)
5937 if (rld[j].reg_rtx != 0
5938 && ((rld[j].optional && ! reload_inherited[j])
5939 || (rld[j].in == 0 && rld[j].out == 0
5940 && ! rld[j].secondary_p)))
5941 {
5942 int regno = true_regnum (rld[j].reg_rtx);
5943
5944 if (spill_reg_order[regno] >= 0)
5945 clear_reload_reg_in_use (regno, rld[j].opnum,
5946 rld[j].when_needed, rld[j].mode);
5947 rld[j].reg_rtx = 0;
5948 reload_spill_index[j] = -1;
5949 }
5950
5951 /* Record which pseudos and which spill regs have output reloads. */
5952 for (j = 0; j < n_reloads; j++)
5953 {
5954 int r = reload_order[j];
5955
5956 i = reload_spill_index[r];
5957
5958 /* I is nonneg if this reload uses a register.
5959 If rld[r].reg_rtx is 0, this is an optional reload
5960 that we opted to ignore. */
5961 if (rld[r].out_reg != 0 && GET_CODE (rld[r].out_reg) == REG
5962 && rld[r].reg_rtx != 0)
5963 {
5964 int nregno = REGNO (rld[r].out_reg);
5965 int nr = 1;
5966
5967 if (nregno < FIRST_PSEUDO_REGISTER)
5968 nr = HARD_REGNO_NREGS (nregno, rld[r].mode);
5969
5970 while (--nr >= 0)
5971 reg_has_output_reload[nregno + nr] = 1;
5972
5973 if (i >= 0)
5974 {
5975 nr = HARD_REGNO_NREGS (i, rld[r].mode);
5976 while (--nr >= 0)
5977 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
5978 }
5979
5980 if (rld[r].when_needed != RELOAD_OTHER
5981 && rld[r].when_needed != RELOAD_FOR_OUTPUT
5982 && rld[r].when_needed != RELOAD_FOR_INSN)
5983 abort ();
5984 }
5985 }
5986}
5987
5988/* Deallocate the reload register for reload R. This is called from
5989 remove_address_replacements. */
5990
5991void
5992deallocate_reload_reg (r)
5993 int r;
5994{
5995 int regno;
5996
5997 if (! rld[r].reg_rtx)
5998 return;
5999 regno = true_regnum (rld[r].reg_rtx);
6000 rld[r].reg_rtx = 0;
6001 if (spill_reg_order[regno] >= 0)
6002 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6003 rld[r].mode);
6004 reload_spill_index[r] = -1;
6005}
6006
6007/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6008 reloads of the same item for fear that we might not have enough reload
6009 registers. However, normally they will get the same reload register
6010 and hence actually need not be loaded twice.
6011
6012 Here we check for the most common case of this phenomenon: when we have
6013 a number of reloads for the same object, each of which were allocated
6014 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6015 reload, and is not modified in the insn itself. If we find such,
6016 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6017 This will not increase the number of spill registers needed and will
6018 prevent redundant code. */
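
/* A hypothetical example: if an insn uses the same pseudo for two
   different input operands, find_reloads may create two separate input
   reloads of the same value.  When both are assigned the same reload
   register and that register is not modified by the insn, the code below
   folds them into a single RELOAD_OTHER reload, so the value is loaded
   only once.  */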
6019
6020static void
6021merge_assigned_reloads (insn)
6022 rtx insn;
6023{
6024 int i, j;
6025
6026 /* Scan all the reloads looking for ones that only load values and
6027 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6028 assigned and not modified by INSN. */
6029
6030 for (i = 0; i < n_reloads; i++)
6031 {
6032 int conflicting_input = 0;
6033 int max_input_address_opnum = -1;
6034 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6035
6036 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6037 || rld[i].out != 0 || rld[i].reg_rtx == 0
6038 || reg_set_p (rld[i].reg_rtx, insn))
6039 continue;
6040
6041 /* Look at all other reloads. Ensure that the only use of this
6042 reload_reg_rtx is in a reload that just loads the same value
6043 as we do. Note that any secondary reloads must be of the identical
6044 class since the values, modes, and result registers are the
6045 same, so we need not do anything with any secondary reloads. */
6046
6047 for (j = 0; j < n_reloads; j++)
6048 {
6049 if (i == j || rld[j].reg_rtx == 0
6050 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6051 rld[i].reg_rtx))
6052 continue;
6053
6054 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6055 && rld[j].opnum > max_input_address_opnum)
6056 max_input_address_opnum = rld[j].opnum;
6057
6058	  /* If the reload regs aren't exactly the same (e.g., different modes)
6059 or if the values are different, we can't merge this reload.
6060 But if it is an input reload, we might still merge
6061 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6062
6063 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6064 || rld[j].out != 0 || rld[j].in == 0
6065 || ! rtx_equal_p (rld[i].in, rld[j].in))
6066 {
6067 if (rld[j].when_needed != RELOAD_FOR_INPUT
6068 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6069 || rld[i].opnum > rld[j].opnum)
6070 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6071 break;
6072 conflicting_input = 1;
6073 if (min_conflicting_input_opnum > rld[j].opnum)
6074 min_conflicting_input_opnum = rld[j].opnum;
6075 }
6076 }
6077
6078 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6079 we, in fact, found any matching reloads. */
6080
6081 if (j == n_reloads
6082 && max_input_address_opnum <= min_conflicting_input_opnum)
6083 {
6084 for (j = 0; j < n_reloads; j++)
6085 if (i != j && rld[j].reg_rtx != 0
6086 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6087 && (! conflicting_input
6088 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6089 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6090 {
6091 rld[i].when_needed = RELOAD_OTHER;
6092 rld[j].in = 0;
6093 reload_spill_index[j] = -1;
6094 transfer_replacements (i, j);
6095 }
6096
6097 /* If this is now RELOAD_OTHER, look for any reloads that load
6098 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6099 if they were for inputs, RELOAD_OTHER for outputs. Note that
6100 this test is equivalent to looking for reloads for this operand
6101 number. */
6102
6103 if (rld[i].when_needed == RELOAD_OTHER)
6104 for (j = 0; j < n_reloads; j++)
6105 if (rld[j].in != 0
6106 && rld[j].when_needed != RELOAD_OTHER
6107 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6108 rld[i].in))
6109 rld[j].when_needed
6110 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6111 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6112 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6113 }
6114 }
6115}
6116
6117/* These arrays are filled by emit_reload_insns and its subroutines. */
6118static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6119static rtx other_input_address_reload_insns = 0;
6120static rtx other_input_reload_insns = 0;
6121static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6122static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6123static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6124static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6125static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6126static rtx operand_reload_insns = 0;
6127static rtx other_operand_reload_insns = 0;
6128static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6129
6130/* Values to be put in spill_reg_store are put here first. */
6131static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6132static HARD_REG_SET reg_reloaded_died;
6133
6134/* Generate insns to perform reload RL, which is for the insn in CHAIN and
6135 has the number J. OLD contains the value to be used as input. */
6136
6137static void
6138emit_input_reload_insns (chain, rl, old, j)
6139 struct insn_chain *chain;
6140 struct reload *rl;
6141 rtx old;
6142 int j;
6143{
6144 rtx insn = chain->insn;
6145 rtx reloadreg = rl->reg_rtx;
6146 rtx oldequiv_reg = 0;
6147 rtx oldequiv = 0;
6148 int special = 0;
6149 enum machine_mode mode;
6150 rtx *where;
6151
6152 /* Determine the mode to reload in.
6153     This is very tricky because we have three modes to choose from.
6154 There is the mode the insn operand wants (rl->inmode).
6155 There is the mode of the reload register RELOADREG.
6156 There is the intrinsic mode of the operand, which we could find
6157 by stripping some SUBREGs.
6158 It turns out that RELOADREG's mode is irrelevant:
6159 we can change that arbitrarily.
6160
6161 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6162 then the reload reg may not support QImode moves, so use SImode.
6163 If foo is in memory due to spilling a pseudo reg, this is safe,
6164 because the QImode value is in the least significant part of a
6165 slot big enough for a SImode. If foo is some other sort of
6166 memory reference, then it is impossible to reload this case,
6167 so previous passes had better make sure this never happens.
6168
6169 Then consider a one-word union which has SImode and one of its
6170 members is a float, being fetched as (SUBREG:SF union:SI).
6171 We must fetch that as SFmode because we could be loading into
6172 a float-only register. In this case OLD's mode is correct.
6173
6174 Consider an immediate integer: it has VOIDmode. Here we need
6175 to get a mode from something else.
6176
6177 In some cases, there is a fourth mode, the operand's
6178 containing mode. If the insn specifies a containing mode for
6179 this operand, it overrides all others.
6180
6181 I am not sure whether the algorithm here is always right,
6182 but it does the right things in those cases. */
6183
6184 mode = GET_MODE (old);
6185 if (mode == VOIDmode)
6186 mode = rl->inmode;
6187
6188#ifdef SECONDARY_INPUT_RELOAD_CLASS
6189 /* If we need a secondary register for this operation, see if
6190 the value is already in a register in that class. Don't
6191 do this if the secondary register will be used as a scratch
6192 register. */
6193
6194 if (rl->secondary_in_reload >= 0
6195 && rl->secondary_in_icode == CODE_FOR_nothing
6196 && optimize)
6197 oldequiv
6198 = find_equiv_reg (old, insn,
6199 rld[rl->secondary_in_reload].class,
6200 -1, NULL, 0, mode);
6201#endif
6202
6203 /* If reloading from memory, see if there is a register
6204 that already holds the same value. If so, reload from there.
6205 We can pass 0 as the reload_reg_p argument because
6206 any other reload has either already been emitted,
6207 in which case find_equiv_reg will see the reload-insn,
6208 or has yet to be emitted, in which case it doesn't matter
6209 because we will use this equiv reg right away. */
6210
6211 if (oldequiv == 0 && optimize
6212 && (GET_CODE (old) == MEM
6213 || (GET_CODE (old) == REG
6214 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6215 && reg_renumber[REGNO (old)] < 0)))
6216 oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
6217
6218 if (oldequiv)
6219 {
6220 unsigned int regno = true_regnum (oldequiv);
6221
6222 /* Don't use OLDEQUIV if any other reload changes it at an
6223 earlier stage of this insn or at this stage. */
6224 if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
6225 rl->in, const0_rtx, j, 0))
6226 oldequiv = 0;
6227
6228 /* If it is no cheaper to copy from OLDEQUIV into the
6229 reload register than it would be to move from memory,
6230 don't use it. Likewise, if we need a secondary register
6231 or memory. */
6232
6233 if (oldequiv != 0
6234 && ((REGNO_REG_CLASS (regno) != rl->class
6235 && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6236 rl->class)
6237 >= MEMORY_MOVE_COST (mode, rl->class, 1)))
6238#ifdef SECONDARY_INPUT_RELOAD_CLASS
6239 || (SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6240 mode, oldequiv)
6241 != NO_REGS)
6242#endif
6243#ifdef SECONDARY_MEMORY_NEEDED
6244 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6245 rl->class,
6246 mode)
6247#endif
6248 ))
6249 oldequiv = 0;
6250 }
6251
6252 /* delete_output_reload is only invoked properly if old contains
6253 the original pseudo register. Since this is replaced with a
6254 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6255 find the pseudo in RELOAD_IN_REG. */
6256 if (oldequiv == 0
6257 && reload_override_in[j]
6258 && GET_CODE (rl->in_reg) == REG)
6259 {
6260 oldequiv = old;
6261 old = rl->in_reg;
6262 }
6263 if (oldequiv == 0)
6264 oldequiv = old;
6265 else if (GET_CODE (oldequiv) == REG)
6266 oldequiv_reg = oldequiv;
6267 else if (GET_CODE (oldequiv) == SUBREG)
6268 oldequiv_reg = SUBREG_REG (oldequiv);
6269
6270 /* If we are reloading from a register that was recently stored in
6271 with an output-reload, see if we can prove there was
6272 actually no need to store the old value in it. */
6273
6274 if (optimize && GET_CODE (oldequiv) == REG
6275 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6276 && spill_reg_store[REGNO (oldequiv)]
6277 && GET_CODE (old) == REG
6278 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6279 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6280 rl->out_reg)))
6281 delete_output_reload (insn, j, REGNO (oldequiv));
6282
6283 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6284 then load RELOADREG from OLDEQUIV. Note that we cannot use
6285 gen_lowpart_common since it can do the wrong thing when
6286 RELOADREG has a multi-word mode. Note that RELOADREG
6287 must always be a REG here. */
6288
6289 if (GET_MODE (reloadreg) != mode)
6290 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6291 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6292 oldequiv = SUBREG_REG (oldequiv);
6293 if (GET_MODE (oldequiv) != VOIDmode
6294 && mode != GET_MODE (oldequiv))
6295 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6296
6297 /* Switch to the right place to emit the reload insns. */
6298 switch (rl->when_needed)
6299 {
6300 case RELOAD_OTHER:
6301 where = &other_input_reload_insns;
6302 break;
6303 case RELOAD_FOR_INPUT:
6304 where = &input_reload_insns[rl->opnum];
6305 break;
6306 case RELOAD_FOR_INPUT_ADDRESS:
6307 where = &input_address_reload_insns[rl->opnum];
6308 break;
6309 case RELOAD_FOR_INPADDR_ADDRESS:
6310 where = &inpaddr_address_reload_insns[rl->opnum];
6311 break;
6312 case RELOAD_FOR_OUTPUT_ADDRESS:
6313 where = &output_address_reload_insns[rl->opnum];
6314 break;
6315 case RELOAD_FOR_OUTADDR_ADDRESS:
6316 where = &outaddr_address_reload_insns[rl->opnum];
6317 break;
6318 case RELOAD_FOR_OPERAND_ADDRESS:
6319 where = &operand_reload_insns;
6320 break;
6321 case RELOAD_FOR_OPADDR_ADDR:
6322 where = &other_operand_reload_insns;
6323 break;
6324 case RELOAD_FOR_OTHER_ADDRESS:
6325 where = &other_input_address_reload_insns;
6326 break;
6327 default:
6328 abort ();
6329 }
6330
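  /* All insns generated for this reload go into the sequence selected
     above; they are collected and stored back through WHERE at the end of
     this function.  */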
6331 push_to_sequence (*where);
6332
6333 /* Auto-increment addresses must be reloaded in a special way. */
6334 if (rl->out && ! rl->out_reg)
6335 {
6336      /* We are not going to bother supporting the case where an
6337	 incremented register can't be copied directly from
6338 OLDEQUIV since this seems highly unlikely. */
6339 if (rl->secondary_in_reload >= 0)
6340 abort ();
6341
6342 if (reload_inherited[j])
6343 oldequiv = reloadreg;
6344
6345 old = XEXP (rl->in_reg, 0);
6346
6347 if (optimize && GET_CODE (oldequiv) == REG
6348 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6349 && spill_reg_store[REGNO (oldequiv)]
6350 && GET_CODE (old) == REG
6351 && (dead_or_set_p (insn,
6352 spill_reg_stored_to[REGNO (oldequiv)])
6353 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6354 old)))
6355 delete_output_reload (insn, j, REGNO (oldequiv));
6356
6357 /* Prevent normal processing of this reload. */
6358 special = 1;
6359 /* Output a special code sequence for this case. */
6360 new_spill_reg_store[REGNO (reloadreg)]
6361 = inc_for_reload (reloadreg, oldequiv, rl->out,
6362 rl->inc);
6363 }
6364
6365 /* If we are reloading a pseudo-register that was set by the previous
6366 insn, see if we can get rid of that pseudo-register entirely
6367 by redirecting the previous insn into our reload register. */
6368
6369 else if (optimize && GET_CODE (old) == REG
6370 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6371 && dead_or_set_p (insn, old)
6372 /* This is unsafe if some other reload
6373 uses the same reg first. */
6374 && ! conflicts_with_override (reloadreg)
6375 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6376 rl->when_needed, old, rl->out, j, 0))
6377 {
6378 rtx temp = PREV_INSN (insn);
6379 while (temp && GET_CODE (temp) == NOTE)
6380 temp = PREV_INSN (temp);
6381 if (temp
6382 && GET_CODE (temp) == INSN
6383 && GET_CODE (PATTERN (temp)) == SET
6384 && SET_DEST (PATTERN (temp)) == old
6385 /* Make sure we can access insn_operand_constraint. */
6386 && asm_noperands (PATTERN (temp)) < 0
6387 /* This is unsafe if prev insn rejects our reload reg. */
6388 && constraint_accepts_reg_p (insn_data[recog_memoized (temp)].operand[0].constraint,
6389 reloadreg)
6390 /* This is unsafe if operand occurs more than once in current
6391 insn. Perhaps some occurrences aren't reloaded. */
6392 && count_occurrences (PATTERN (insn), old, 0) == 1
6393 /* Don't risk splitting a matching pair of operands. */
6394 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6395 {
6396 /* Store into the reload register instead of the pseudo. */
6397 SET_DEST (PATTERN (temp)) = reloadreg;
6398
6399 /* If the previous insn is an output reload, the source is
6400 a reload register, and its spill_reg_store entry will
6401 contain the previous destination. This is now
6402 invalid. */
6403 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6404 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6405 {
6406 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6407 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6408 }
6409
6410 /* If these are the only uses of the pseudo reg,
6411 pretend for GDB it lives in the reload reg we used. */
6412 if (REG_N_DEATHS (REGNO (old)) == 1
6413 && REG_N_SETS (REGNO (old)) == 1)
6414 {
6415 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6416 alter_reg (REGNO (old), -1);
6417 }
6418 special = 1;
6419 }
6420 }
6421
6422 /* We can't do that, so output an insn to load RELOADREG. */
6423
6424#ifdef SECONDARY_INPUT_RELOAD_CLASS
6425 /* If we have a secondary reload, pick up the secondary register
6426 and icode, if any. If OLDEQUIV and OLD are different or
6427 if this is an in-out reload, recompute whether or not we
6428 still need a secondary register and what the icode should
6429 be. If we still need a secondary register and the class or
6430 icode is different, go back to reloading from OLD if using
6431 OLDEQUIV means that we got the wrong type of register. We
6432 cannot have different class or icode due to an in-out reload
6433 because we don't make such reloads when both the input and
6434 output need secondary reload registers. */
6435
6436 if (! special && rl->secondary_in_reload >= 0)
6437 {
6438 rtx second_reload_reg = 0;
6439 int secondary_reload = rl->secondary_in_reload;
6440 rtx real_oldequiv = oldequiv;
6441 rtx real_old = old;
6442 rtx tmp;
6443 enum insn_code icode;
6444
6445 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6446 and similarly for OLD.
6447 See comments in get_secondary_reload in reload.c. */
6448 /* If it is a pseudo that cannot be replaced with its
6449 equivalent MEM, we must fall back to reload_in, which
6450 will have all the necessary substitutions registered.
6451 Likewise for a pseudo that can't be replaced with its
6452 equivalent constant.
6453
6454 Take extra care for subregs of such pseudos. Note that
6455 we cannot use reg_equiv_mem in this case because it is
6456 not in the right mode. */
6457
6458 tmp = oldequiv;
6459 if (GET_CODE (tmp) == SUBREG)
6460 tmp = SUBREG_REG (tmp);
6461 if (GET_CODE (tmp) == REG
6462 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6463 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6464 || reg_equiv_constant[REGNO (tmp)] != 0))
6465 {
6466 if (! reg_equiv_mem[REGNO (tmp)]
6467 || num_not_at_initial_offset
6468 || GET_CODE (oldequiv) == SUBREG)
6469 real_oldequiv = rl->in;
6470 else
6471 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6472 }
6473
6474 tmp = old;
6475 if (GET_CODE (tmp) == SUBREG)
6476 tmp = SUBREG_REG (tmp);
6477 if (GET_CODE (tmp) == REG
6478 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6479 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6480 || reg_equiv_constant[REGNO (tmp)] != 0))
6481 {
6482 if (! reg_equiv_mem[REGNO (tmp)]
6483 || num_not_at_initial_offset
6484 || GET_CODE (old) == SUBREG)
6485 real_old = rl->in;
6486 else
6487 real_old = reg_equiv_mem[REGNO (tmp)];
6488 }
6489
6490 second_reload_reg = rld[secondary_reload].reg_rtx;
6491 icode = rl->secondary_in_icode;
6492
6493 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6494 || (rl->in != 0 && rl->out != 0))
6495 {
6496 enum reg_class new_class
6497 = SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6498 mode, real_oldequiv);
6499
6500 if (new_class == NO_REGS)
6501 second_reload_reg = 0;
6502 else
6503 {
6504 enum insn_code new_icode;
6505 enum machine_mode new_mode;
6506
6507 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6508 REGNO (second_reload_reg)))
6509 oldequiv = old, real_oldequiv = real_old;
6510 else
6511 {
6512 new_icode = reload_in_optab[(int) mode];
6513 if (new_icode != CODE_FOR_nothing
6514 && ((insn_data[(int) new_icode].operand[0].predicate
6515 && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6516 (reloadreg, mode)))
6517 || (insn_data[(int) new_icode].operand[1].predicate
6518 && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6519 (real_oldequiv, mode)))))
6520 new_icode = CODE_FOR_nothing;
6521
6522 if (new_icode == CODE_FOR_nothing)
6523 new_mode = mode;
6524 else
6525 new_mode = insn_data[(int) new_icode].operand[2].mode;
6526
6527 if (GET_MODE (second_reload_reg) != new_mode)
6528 {
6529 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6530 new_mode))
6531 oldequiv = old, real_oldequiv = real_old;
6532 else
6533 second_reload_reg
6534 = gen_rtx_REG (new_mode,
6535 REGNO (second_reload_reg));
6536 }
6537 }
6538 }
6539 }
6540
6541 /* If we still need a secondary reload register, check
6542 to see if it is being used as a scratch or intermediate
6543 register and generate code appropriately. If we need
6544 a scratch register, use REAL_OLDEQUIV since the form of
6545 the insn may depend on the actual address if it is
6546 a MEM. */
6547
6548 if (second_reload_reg)
6549 {
6550 if (icode != CODE_FOR_nothing)
6551 {
6552 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6553 second_reload_reg));
6554 special = 1;
6555 }
6556 else
6557 {
6558 /* See if we need a scratch register to load the
6559 intermediate register (a tertiary reload). */
6560 enum insn_code tertiary_icode
6561 = rld[secondary_reload].secondary_in_icode;
6562
6563 if (tertiary_icode != CODE_FOR_nothing)
6564 {
6565 rtx third_reload_reg
6566 = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
6567
6568 emit_insn ((GEN_FCN (tertiary_icode)
6569 (second_reload_reg, real_oldequiv,
6570 third_reload_reg)));
6571 }
6572 else
6573 gen_reload (second_reload_reg, real_oldequiv,
6574 rl->opnum,
6575 rl->when_needed);
6576
6577 oldequiv = second_reload_reg;
6578 }
6579 }
6580 }
6581#endif
6582
6583 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6584 {
6585 rtx real_oldequiv = oldequiv;
6586
6587 if ((GET_CODE (oldequiv) == REG
6588 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6589 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6590 || reg_equiv_constant[REGNO (oldequiv)] != 0))
6591 || (GET_CODE (oldequiv) == SUBREG
6592 && GET_CODE (SUBREG_REG (oldequiv)) == REG
6593 && (REGNO (SUBREG_REG (oldequiv))
6594 >= FIRST_PSEUDO_REGISTER)
6595 && ((reg_equiv_memory_loc
6596 [REGNO (SUBREG_REG (oldequiv))] != 0)
6597 || (reg_equiv_constant
6598 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6599 || (CONSTANT_P (oldequiv)
6600 && (PREFERRED_RELOAD_CLASS (oldequiv,
6601 REGNO_REG_CLASS (REGNO (reloadreg)))
6602 == NO_REGS)))
6603 real_oldequiv = rl->in;
6604 gen_reload (reloadreg, real_oldequiv, rl->opnum,
6605 rl->when_needed);
6606 }
6607
6608 if (flag_non_call_exceptions)
6609 copy_eh_notes (insn, get_insns ());
6610
6611 /* End this sequence. */
6612 *where = get_insns ();
6613 end_sequence ();
6614
6615 /* Update reload_override_in so that delete_address_reloads_1
6616 can see the actual register usage. */
6617 if (oldequiv_reg)
6618 reload_override_in[j] = oldequiv;
6619}
6620
6621/* Generate insns for the output reload RL, which is for the insn described
6622 by CHAIN and has the number J. */
6623static void
6624emit_output_reload_insns (chain, rl, j)
6625 struct insn_chain *chain;
6626 struct reload *rl;
6627 int j;
6628{
6629 rtx reloadreg = rl->reg_rtx;
6630 rtx insn = chain->insn;
6631 int special = 0;
6632 rtx old = rl->out;
6633 enum machine_mode mode = GET_MODE (old);
6634 rtx p;
6635
6636 if (rl->when_needed == RELOAD_OTHER)
6637 start_sequence ();
6638 else
6639 push_to_sequence (output_reload_insns[rl->opnum]);
6640
6641 /* Determine the mode to reload in.
6642 See comments above (for input reloading). */
6643
6644 if (mode == VOIDmode)
6645 {
6646 /* VOIDmode should never happen for an output. */
6647 if (asm_noperands (PATTERN (insn)) < 0)
6648 /* It's the compiler's fault. */
6649 fatal_insn ("VOIDmode on an output", insn);
6650 error_for_asm (insn, "output operand is constant in `asm'");
6651 /* Prevent crash--use something we know is valid. */
6652 mode = word_mode;
6653 old = gen_rtx_REG (mode, REGNO (reloadreg));
6654 }
6655
6656 if (GET_MODE (reloadreg) != mode)
6657 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6658
6659#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6660
6661 /* If we need two reload regs, set RELOADREG to the intermediate
6662 one, since it will be stored into OLD. We might need a secondary
6663 register only for an input reload, so check again here. */
6664
6665 if (rl->secondary_out_reload >= 0)
6666 {
6667 rtx real_old = old;
6668
6669 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6670 && reg_equiv_mem[REGNO (old)] != 0)
6671 real_old = reg_equiv_mem[REGNO (old)];
6672
6673 if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl->class,
6674 mode, real_old)
6675 != NO_REGS))
6676 {
6677 rtx second_reloadreg = reloadreg;
6678 reloadreg = rld[rl->secondary_out_reload].reg_rtx;
6679
6680 /* See if RELOADREG is to be used as a scratch register
6681 or as an intermediate register. */
6682 if (rl->secondary_out_icode != CODE_FOR_nothing)
6683 {
6684 emit_insn ((GEN_FCN (rl->secondary_out_icode)
6685 (real_old, second_reloadreg, reloadreg)));
6686 special = 1;
6687 }
6688 else
6689 {
6690 /* See if we need both a scratch and intermediate reload
6691 register. */
6692
6693 int secondary_reload = rl->secondary_out_reload;
6694 enum insn_code tertiary_icode
6695 = rld[secondary_reload].secondary_out_icode;
6696
6697 if (GET_MODE (reloadreg) != mode)
6698 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6699
6700 if (tertiary_icode != CODE_FOR_nothing)
6701 {
6702 rtx third_reloadreg
6703 = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
6704 rtx tem;
6705
6706 /* Copy primary reload reg to secondary reload reg
6707 (note that these have been swapped above), then copy
6708 secondary reload reg to OLD using our insn. */
6709
6710 /* If REAL_OLD is a paradoxical SUBREG, remove it
6711 and try to put the opposite SUBREG on
6712 RELOADREG. */
6713 if (GET_CODE (real_old) == SUBREG
6714 && (GET_MODE_SIZE (GET_MODE (real_old))
6715 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6716 && 0 != (tem = gen_lowpart_common
6717 (GET_MODE (SUBREG_REG (real_old)),
6718 reloadreg)))
6719 real_old = SUBREG_REG (real_old), reloadreg = tem;
6720
6721 gen_reload (reloadreg, second_reloadreg,
6722 rl->opnum, rl->when_needed);
6723 emit_insn ((GEN_FCN (tertiary_icode)
6724 (real_old, reloadreg, third_reloadreg)));
6725 special = 1;
6726 }
6727
6728 else
6729 /* Copy between the reload regs here and then to
6730 OUT later. */
6731
6732 gen_reload (reloadreg, second_reloadreg,
6733 rl->opnum, rl->when_needed);
6734 }
6735 }
6736 }
6737#endif
6738
6739 /* Output the last reload insn. */
6740 if (! special)
6741 {
6742 rtx set;
6743
6744 /* Don't output the last reload if OLD is not the dest of
6745 INSN and is in the src and is clobbered by INSN. */
6746 if (! flag_expensive_optimizations
6747 || GET_CODE (old) != REG
6748 || !(set = single_set (insn))
6749 || rtx_equal_p (old, SET_DEST (set))
6750 || !reg_mentioned_p (old, SET_SRC (set))
6751 || !regno_clobbered_p (REGNO (old), insn, rl->mode, 0))
6752 gen_reload (old, reloadreg, rl->opnum,
6753 rl->when_needed);
6754 }
6755
6756 /* Look at all insns we emitted, just to be safe. */
6757 for (p = get_insns (); p; p = NEXT_INSN (p))
6758 if (INSN_P (p))
6759 {
6760 rtx pat = PATTERN (p);
6761
6762 /* If this output reload doesn't come from a spill reg,
6763 clear any memory of reloaded copies of the pseudo reg.
6764 If this output reload comes from a spill reg,
6765 reg_has_output_reload will make this do nothing. */
6766 note_stores (pat, forget_old_reloads_1, NULL);
6767
6768 if (reg_mentioned_p (rl->reg_rtx, pat))
6769 {
6770 rtx set = single_set (insn);
6771 if (reload_spill_index[j] < 0
6772 && set
6773 && SET_SRC (set) == rl->reg_rtx)
6774 {
6775 int src = REGNO (SET_SRC (set));
6776
6777 reload_spill_index[j] = src;
6778 SET_HARD_REG_BIT (reg_is_output_reload, src);
6779 if (find_regno_note (insn, REG_DEAD, src))
6780 SET_HARD_REG_BIT (reg_reloaded_died, src);
6781 }
6782 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6783 {
6784 int s = rl->secondary_out_reload;
6785 set = single_set (p);
6786 /* If this reload copies only to the secondary reload
6787 register, the secondary reload does the actual
6788 store. */
6789 if (s >= 0 && set == NULL_RTX)
6790 /* We can't tell what function the secondary reload
6791 has and where the actual store to the pseudo is
6792 made; leave new_spill_reg_store alone. */
6793 ;
6794 else if (s >= 0
6795 && SET_SRC (set) == rl->reg_rtx
6796 && SET_DEST (set) == rld[s].reg_rtx)
6797 {
6798 /* Usually the next instruction will be the
6799 secondary reload insn; if we can confirm
6800 that it is, setting new_spill_reg_store to
6801 that insn will allow an extra optimization. */
6802 rtx s_reg = rld[s].reg_rtx;
6803 rtx next = NEXT_INSN (p);
6804 rld[s].out = rl->out;
6805 rld[s].out_reg = rl->out_reg;
6806 set = single_set (next);
6807 if (set && SET_SRC (set) == s_reg
6808 && ! new_spill_reg_store[REGNO (s_reg)])
6809 {
6810 SET_HARD_REG_BIT (reg_is_output_reload,
6811 REGNO (s_reg));
6812 new_spill_reg_store[REGNO (s_reg)] = next;
6813 }
6814 }
6815 else
6816 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
6817 }
6818 }
6819 }
6820
6821 if (rl->when_needed == RELOAD_OTHER)
6822 {
6823 emit_insns (other_output_reload_insns[rl->opnum]);
6824 other_output_reload_insns[rl->opnum] = get_insns ();
6825 }
6826 else
6827 output_reload_insns[rl->opnum] = get_insns ();
6828
6829 if (flag_non_call_exceptions)
6830 copy_eh_notes (insn, get_insns ());
6831
6832 end_sequence ();
6833}
6834
6835/* Do input reloading for reload RL, which is for the insn described by CHAIN
6836 and has the number J. */
6837static void
6838do_input_reload (chain, rl, j)
6839 struct insn_chain *chain;
6840 struct reload *rl;
6841 int j;
6842{
6843 int expect_occurrences = 1;
6844 rtx insn = chain->insn;
6845 rtx old = (rl->in && GET_CODE (rl->in) == MEM
6846 ? rl->in_reg : rl->in);
6847
6848 if (old != 0
6849 /* AUTO_INC reloads need to be handled even if inherited. We get an
6850 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6851 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
6852 && ! rtx_equal_p (rl->reg_rtx, old)
6853 && rl->reg_rtx != 0)
6854 emit_input_reload_insns (chain, rld + j, old, j);
6855
6856 /* When inheriting a wider reload, we have a MEM in rl->in,
6857 e.g. inheriting a SImode output reload for
6858 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6859 if (optimize && reload_inherited[j] && rl->in
6860 && GET_CODE (rl->in) == MEM
6861 && GET_CODE (rl->in_reg) == MEM
6862 && reload_spill_index[j] >= 0
6863 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6864 {
6865 expect_occurrences
6866 = count_occurrences (PATTERN (insn), rl->in, 0) == 1 ? 0 : -1;
6867 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6868 }
6869
6870 /* If we are reloading a register that was recently stored in with an
6871 output-reload, see if we can prove there was
6872 actually no need to store the old value in it. */
6873
6874 if (optimize
6875 && (reload_inherited[j] || reload_override_in[j])
6876 && rl->reg_rtx
6877 && GET_CODE (rl->reg_rtx) == REG
6878 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
6879#if 0
6880 /* There doesn't seem to be any reason to restrict this to pseudos
6881 and doing so loses in the case where we are copying from a
6882 register of the wrong class. */
6883 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
6884 >= FIRST_PSEUDO_REGISTER)
6885#endif
6886 /* The insn might already have some references to stack slots
6887 replaced by MEMs, while reload_out_reg still names the
6888 original pseudo. */
6889 && (dead_or_set_p (insn,
6890 spill_reg_stored_to[REGNO (rl->reg_rtx)])
6891 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
6892 rl->out_reg)))
6893 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
6894}
6895
6896/* Do output reloading for reload RL, which is for the insn described by
6897 CHAIN and has the number J.
6898 ??? At some point we need to support handling output reloads of
6899 JUMP_INSNs or insns that set cc0. */
6900static void
6901do_output_reload (chain, rl, j)
6902 struct insn_chain *chain;
6903 struct reload *rl;
6904 int j;
6905{
6906 rtx note, old;
6907 rtx insn = chain->insn;
6908 /* If this is an output reload that stores something that is
6909 not loaded in this same reload, see if we can eliminate a previous
6910 store. */
6911 rtx pseudo = rl->out_reg;
6912
6913 if (pseudo
6914 && optimize
6915 && GET_CODE (pseudo) == REG
6916 && ! rtx_equal_p (rl->in_reg, pseudo)
6917 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6918 && reg_last_reload_reg[REGNO (pseudo)])
6919 {
6920 int pseudo_no = REGNO (pseudo);
6921 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
6922
6923 /* We don't need to test full validity of last_regno for
6924 inherit here; we only want to know if the store actually
6925 matches the pseudo. */
6926 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
6927 && reg_reloaded_contents[last_regno] == pseudo_no
6928 && spill_reg_store[last_regno]
6929 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
6930 delete_output_reload (insn, j, last_regno);
6931 }
6932
6933 old = rl->out_reg;
6934 if (old == 0
6935 || rl->reg_rtx == old
6936 || rl->reg_rtx == 0)
6937 return;
6938
6939 /* An output operand that dies right away does need a reload,
6940 but need not be copied from it. Show the new location in the
6941 REG_UNUSED note. */
6942 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6943 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6944 {
6945 XEXP (note, 0) = rl->reg_rtx;
6946 return;
6947 }
6948 /* Likewise for a SUBREG of an operand that dies. */
6949 else if (GET_CODE (old) == SUBREG
6950 && GET_CODE (SUBREG_REG (old)) == REG
6951 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6952 SUBREG_REG (old))))
6953 {
6954 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6955 rl->reg_rtx);
6956 return;
6957 }
6958 else if (GET_CODE (old) == SCRATCH)
6959 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6960 but we don't want to make an output reload. */
6961 return;
6962
6963 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6964 if (GET_CODE (insn) == JUMP_INSN)
6965 abort ();
6966
6967 emit_output_reload_insns (chain, rld + j, j);
6968}
6969
6970/* Output insns to reload values in and out of the chosen reload regs. */
6971
6972static void
6973emit_reload_insns (chain)
6974 struct insn_chain *chain;
6975{
6976 rtx insn = chain->insn;
6977
6978 int j;
6979
6980 CLEAR_HARD_REG_SET (reg_reloaded_died);
6981
6982 for (j = 0; j < reload_n_operands; j++)
6983 input_reload_insns[j] = input_address_reload_insns[j]
6984 = inpaddr_address_reload_insns[j]
6985 = output_reload_insns[j] = output_address_reload_insns[j]
6986 = outaddr_address_reload_insns[j]
6987 = other_output_reload_insns[j] = 0;
6988 other_input_address_reload_insns = 0;
6989 other_input_reload_insns = 0;
6990 operand_reload_insns = 0;
6991 other_operand_reload_insns = 0;
6992
6993 /* Dump reloads into the dump file. */
6994 if (rtl_dump_file)
6995 {
6996 fprintf (rtl_dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
6997 debug_reload_to_stream (rtl_dump_file);
6998 }
6999
7000 /* Now output the instructions to copy the data into and out of the
7001 reload registers. Do these in the order that the reloads were reported,
7002 since reloads of base and index registers precede reloads of operands
7003 and the operands may need the base and index registers reloaded. */
7004
7005 for (j = 0; j < n_reloads; j++)
7006 {
7007 if (rld[j].reg_rtx
7008 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7009 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7010
7011 do_input_reload (chain, rld + j, j);
7012 do_output_reload (chain, rld + j, j);
7013 }
7014
7015 /* Now write all the insns we made for reloads in the order expected by
7016 the allocation functions. Prior to the insn being reloaded, we write
7017 the following reloads:
7018
7019 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7020
7021 RELOAD_OTHER reloads.
7022
7023 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7024 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7025 RELOAD_FOR_INPUT reload for the operand.
7026
7027 RELOAD_FOR_OPADDR_ADDRS reloads.
7028
7029 RELOAD_FOR_OPERAND_ADDRESS reloads.
7030
7031 After the insn being reloaded, we write the following:
7032
7033 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7034 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7035 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7036 reloads for the operand. The RELOAD_OTHER output reloads are
7037 output in descending order by reload number. */
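 /* For instance, for an insn whose only input operand is a MEM needing
    both an address reload and an input reload, the address reload insns
    come out first and the load of the input reload register right after
    them, all immediately before INSN; any output reloads for the same
    operand would follow INSN in the analogous order.  */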
7038
7039 emit_insns_before (other_input_address_reload_insns, insn);
7040 emit_insns_before (other_input_reload_insns, insn);
7041
7042 for (j = 0; j < reload_n_operands; j++)
7043 {
7044 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7045 emit_insns_before (input_address_reload_insns[j], insn);
7046 emit_insns_before (input_reload_insns[j], insn);
7047 }
7048
7049 emit_insns_before (other_operand_reload_insns, insn);
7050 emit_insns_before (operand_reload_insns, insn);
7051
7052 for (j = 0; j < reload_n_operands; j++)
7053 {
7054 rtx x = emit_insns_after (outaddr_address_reload_insns[j], insn);
7055 x = emit_insns_after (output_address_reload_insns[j], x);
7056 x = emit_insns_after (output_reload_insns[j], x);
7057 emit_insns_after (other_output_reload_insns[j], x);
7058 }
7059
7060 /* For all the spill regs newly reloaded in this instruction,
7061 record what they were reloaded from, so subsequent instructions
7062 can inherit the reloads.
7063
7064 Update spill_reg_store for the reloads of this insn.
7065 Copy the elements that were updated in the loop above. */
7066
7067 for (j = 0; j < n_reloads; j++)
7068 {
7069 int r = reload_order[j];
7070 int i = reload_spill_index[r];
7071
7072 /* If this is a non-inherited input reload from a pseudo, we must
7073 clear any memory of a previous store to the same pseudo. Only do
7074 something if there will not be an output reload for the pseudo
7075 being reloaded. */
7076 if (rld[r].in_reg != 0
7077 && ! (reload_inherited[r] || reload_override_in[r]))
7078 {
7079 rtx reg = rld[r].in_reg;
7080
7081 if (GET_CODE (reg) == SUBREG)
7082 reg = SUBREG_REG (reg);
7083
7084 if (GET_CODE (reg) == REG
7085 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7086 && ! reg_has_output_reload[REGNO (reg)])
7087 {
7088 int nregno = REGNO (reg);
7089
7090 if (reg_last_reload_reg[nregno])
7091 {
7092 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7093
7094 if (reg_reloaded_contents[last_regno] == nregno)
7095 spill_reg_store[last_regno] = 0;
7096 }
7097 }
7098 }
7099
7100 /* I is nonneg if this reload used a register.
7101 If rld[r].reg_rtx is 0, this is an optional reload
7102 that we opted to ignore. */
7103
7104 if (i >= 0 && rld[r].reg_rtx != 0)
7105 {
7106 int nr = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
7107 int k;
7108 int part_reaches_end = 0;
7109 int all_reaches_end = 1;
7110
7111 /* For a multi register reload, we need to check if all or part
7112 of the value lives to the end. */
7113 for (k = 0; k < nr; k++)
7114 {
7115 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7116 rld[r].when_needed))
7117 part_reaches_end = 1;
7118 else
7119 all_reaches_end = 0;
7120 }
7121
7122 /* Ignore reloads that don't reach the end of the insn in
7123 their entirety. */
7124 if (all_reaches_end)
7125 {
7126 /* First, clear out memory of what used to be in this spill reg.
7127 If consecutive registers are used, clear them all. */
7128
7129 for (k = 0; k < nr; k++)
7130 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7131
7132 /* Maybe the spill reg contains a copy of reload_out. */
7133 if (rld[r].out != 0
7134 && (GET_CODE (rld[r].out) == REG
7135#ifdef AUTO_INC_DEC
7136 || ! rld[r].out_reg
7137#endif
7138 || GET_CODE (rld[r].out_reg) == REG))
7139 {
7140 rtx out = (GET_CODE (rld[r].out) == REG
7141 ? rld[r].out
7142 : rld[r].out_reg
7143 ? rld[r].out_reg
7144/* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7145 int nregno = REGNO (out);
7146 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7147 : HARD_REGNO_NREGS (nregno,
7148 GET_MODE (rld[r].reg_rtx)));
7149
7150 spill_reg_store[i] = new_spill_reg_store[i];
7151 spill_reg_stored_to[i] = out;
7152 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7153
7154 /* If NREGNO is a hard register, it may occupy more than
7155 one register. If it does, say what is in the
7156 rest of the registers assuming that both registers
7157 agree on how many words the object takes. If not,
7158 invalidate the subsequent registers. */
7159
7160 if (nregno < FIRST_PSEUDO_REGISTER)
7161 for (k = 1; k < nnr; k++)
7162 reg_last_reload_reg[nregno + k]
7163 = (nr == nnr
7164 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7165 REGNO (rld[r].reg_rtx) + k)
7166 : 0);
7167
7168 /* Now do the inverse operation. */
7169 for (k = 0; k < nr; k++)
7170 {
7171 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7172 reg_reloaded_contents[i + k]
7173 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7174 ? nregno
7175 : nregno + k);
7176 reg_reloaded_insn[i + k] = insn;
7177 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7178 }
7179 }
7180
7181 /* Maybe the spill reg contains a copy of reload_in. Only do
7182 something if there will not be an output reload for
7183 the register being reloaded. */
7184 else if (rld[r].out_reg == 0
7185 && rld[r].in != 0
7186 && ((GET_CODE (rld[r].in) == REG
7187 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7188 && ! reg_has_output_reload[REGNO (rld[r].in)])
7189 || (GET_CODE (rld[r].in_reg) == REG
7190 && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7191 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7192 {
7193 int nregno;
7194 int nnr;
7195
7196 if (GET_CODE (rld[r].in) == REG
7197 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7198 nregno = REGNO (rld[r].in);
7199 else if (GET_CODE (rld[r].in_reg) == REG)
7200 nregno = REGNO (rld[r].in_reg);
7201 else
7202 nregno = REGNO (XEXP (rld[r].in_reg, 0));
7203
7204 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7205 : HARD_REGNO_NREGS (nregno,
7206 GET_MODE (rld[r].reg_rtx)));
7207
7208 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7209
7210 if (nregno < FIRST_PSEUDO_REGISTER)
7211 for (k = 1; k < nnr; k++)
7212 reg_last_reload_reg[nregno + k]
7213 = (nr == nnr
7214 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7215 REGNO (rld[r].reg_rtx) + k)
7216 : 0);
7217
7218 /* Unless we inherited this reload, show we haven't
7219 recently done a store.
7220 Previous stores of inherited auto_inc expressions
7221 also have to be discarded. */
7222 if (! reload_inherited[r]
7223 || (rld[r].out && ! rld[r].out_reg))
7224 spill_reg_store[i] = 0;
7225
7226 for (k = 0; k < nr; k++)
7227 {
7228 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7229 reg_reloaded_contents[i + k]
7230 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7231 ? nregno
7232 : nregno + k);
7233 reg_reloaded_insn[i + k] = insn;
7234 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7235 }
7236 }
7237 }
7238
7239 /* However, if part of the reload reaches the end, then we must
7240 invalidate the old info for the part that survives to the end. */
7241 else if (part_reaches_end)
7242 {
7243 for (k = 0; k < nr; k++)
7244 if (reload_reg_reaches_end_p (i + k,
7245 rld[r].opnum,
7246 rld[r].when_needed))
7247 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7248 }
7249 }
7250
7251 /* The following if-statement was #if 0'd in 1.34 (or before...).
7252 It's reenabled in 1.35 because supposedly nothing else
7253 deals with this problem. */
7254
7255 /* If a register gets output-reloaded from a non-spill register,
7256 that invalidates any previous reloaded copy of it.
7257 But forget_old_reloads_1 won't get to see it, because
7258 it thinks only about the original insn. So invalidate it here. */
7259 if (i < 0 && rld[r].out != 0
7260 && (GET_CODE (rld[r].out) == REG
7261 || (GET_CODE (rld[r].out) == MEM
7262 && GET_CODE (rld[r].out_reg) == REG)))
7263 {
7264 rtx out = (GET_CODE (rld[r].out) == REG
7265 ? rld[r].out : rld[r].out_reg);
7266 int nregno = REGNO (out);
7267 if (nregno >= FIRST_PSEUDO_REGISTER)
7268 {
7269 rtx src_reg, store_insn = NULL_RTX;
7270
7271 reg_last_reload_reg[nregno] = 0;
7272
7273 /* If we can find a hard register that is stored, record
7274 the storing insn so that we may delete this insn with
7275 delete_output_reload. */
7276 src_reg = rld[r].reg_rtx;
7277
7278 /* If this is an optional reload, try to find the source reg
7279 from an input reload. */
7280 if (! src_reg)
7281 {
7282 rtx set = single_set (insn);
7283 if (set && SET_DEST (set) == rld[r].out)
7284 {
7285 int k;
7286
7287 src_reg = SET_SRC (set);
7288 store_insn = insn;
7289 for (k = 0; k < n_reloads; k++)
7290 {
7291 if (rld[k].in == src_reg)
7292 {
7293 src_reg = rld[k].reg_rtx;
7294 break;
7295 }
7296 }
7297 }
7298 }
7299 else
7300 store_insn = new_spill_reg_store[REGNO (src_reg)];
7301 if (src_reg && GET_CODE (src_reg) == REG
7302 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7303 {
7304 int src_regno = REGNO (src_reg);
7305 int nr = HARD_REGNO_NREGS (src_regno, rld[r].mode);
7306 /* The place to look for a death note varies with
7307 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7308 necessarily checked exactly in the code that moves
7309 notes, so just check both locations. */
7310 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7311 if (! note && store_insn)
7312 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7313 while (nr-- > 0)
7314 {
7315 spill_reg_store[src_regno + nr] = store_insn;
7316 spill_reg_stored_to[src_regno + nr] = out;
7317 reg_reloaded_contents[src_regno + nr] = nregno;
7318 reg_reloaded_insn[src_regno + nr] = store_insn;
7319 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7320 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7321 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7322 if (note)
7323 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7324 else
7325 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7326 }
7327 reg_last_reload_reg[nregno] = src_reg;
7328 }
7329 }
7330 else
7331 {
7332 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (rld[r].out));
7333
7334 while (num_regs-- > 0)
7335 reg_last_reload_reg[nregno + num_regs] = 0;
7336 }
7337 }
7338 }
7339 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7340}
7341
7342/* Emit code to perform a reload from IN (which may be a reload register) to
7343 OUT (which may also be a reload register). IN or OUT is from operand
7344 OPNUM with reload type TYPE.
7345
7346 Returns first insn emitted. */
7347
7348rtx
7349gen_reload (out, in, opnum, type)
7350 rtx out;
7351 rtx in;
7352 int opnum;
7353 enum reload_type type;
7354{
7355 rtx last = get_last_insn ();
7356 rtx tem;
7357
7358 /* If IN is a paradoxical SUBREG, remove it and try to put the
7359 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7360 if (GET_CODE (in) == SUBREG
7361 && (GET_MODE_SIZE (GET_MODE (in))
7362 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7363 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7364 in = SUBREG_REG (in), out = tem;
7365 else if (GET_CODE (out) == SUBREG
7366 && (GET_MODE_SIZE (GET_MODE (out))
7367 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7368 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7369 out = SUBREG_REG (out), in = tem;
7370
7371 /* How to do this reload can get quite tricky. Normally, we are being
7372 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7373 register that didn't get a hard register. In that case we can just
7374 call emit_move_insn.
7375
7376 We can also be asked to reload a PLUS that adds a register or a MEM to
7377 another register, constant or MEM. This can occur during frame pointer
7378 elimination and while reloading addresses. This case is handled by
7379 trying to emit a single insn to perform the add. If it is not valid,
7380 we use a two insn sequence.
7381
7382 Finally, we could be called to handle an 'o' constraint by putting
7383 an address into a register. In that case, we first try to do this
7384 with a named pattern of "reload_load_address". If no such pattern
7385 exists, we just emit a SET insn and hope for the best (it will normally
7386 be valid on machines that use 'o').
7387
7388 This entire process is made complex because reload will never
7389 process the insns we generate here and so we must ensure that
7390 they will fit their constraints and also by the fact that parts of
7391 IN might be being reloaded separately and replaced with spill registers.
7392 Because of this, we are, in some sense, just guessing the right approach
7393 here. The one listed above seems to work.
7394
7395 ??? At some point, this whole thing needs to be rethought. */
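   /* For instance (illustrative only): to reload
      (plus:SI (reg:SI fp) (const_int 8)) into reload register R we first
      try the single insn (set R (plus:SI (reg:SI fp) (const_int 8))); if
      that is not recognized or fails its constraints, we fall back to
      loading one operand into R with a move (the constant, in this
      example) and then adding the other operand to R with the machine's
      add pattern.  */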
7396
7397 if (GET_CODE (in) == PLUS
7398 && (GET_CODE (XEXP (in, 0)) == REG
7399 || GET_CODE (XEXP (in, 0)) == SUBREG
7400 || GET_CODE (XEXP (in, 0)) == MEM)
7401 && (GET_CODE (XEXP (in, 1)) == REG
7402 || GET_CODE (XEXP (in, 1)) == SUBREG
7403 || CONSTANT_P (XEXP (in, 1))
7404 || GET_CODE (XEXP (in, 1)) == MEM))
7405 {
7406 /* We need to compute the sum of a register or a MEM and another
7407 register, constant, or MEM, and put it into the reload
7408 register. The best possible way of doing this is if the machine
7409 has a three-operand ADD insn that accepts the required operands.
7410
7411 The simplest approach is to try to generate such an insn and see if it
7412 is recognized and matches its constraints. If so, it can be used.
7413
7414 It might be better not to actually emit the insn unless it is valid,
7415 but we need to pass the insn as an operand to `recog' and
7416 `extract_insn' and it is simpler to emit and then delete the insn if
7417 not valid than to dummy things up. */
7418
7419 rtx op0, op1, tem, insn;
7420 int code;
7421
7422 op0 = find_replacement (&XEXP (in, 0));
7423 op1 = find_replacement (&XEXP (in, 1));
7424
7425 /* Since constraint checking is strict, commutativity won't be
7426 checked, so we need to do that here to avoid spurious failure
7427 if the add instruction is two-address and the second operand
7428 of the add is the same as the reload reg, which is frequently
7429 the case. If the insn would be A = B + A, rearrange it so
7430 it will be A = A + B as constrain_operands expects. */
7431
7432 if (GET_CODE (XEXP (in, 1)) == REG
7433 && REGNO (out) == REGNO (XEXP (in, 1)))
7434 tem = op0, op0 = op1, op1 = tem;
7435
7436 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7437 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7438
7439 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7440 code = recog_memoized (insn);
7441
7442 if (code >= 0)
7443 {
7444 extract_insn (insn);
7445 /* We want constrain operands to treat this insn strictly in
7446 its validity determination, i.e., the way it would after reload
7447 has completed. */
7448 if (constrain_operands (1))
7449 return insn;
7450 }
7451
7452 delete_insns_since (last);
7453
7454 /* If that failed, we must use a conservative two-insn sequence.
7455
7456 Use a move to copy one operand into the reload register. Prefer
7457 to reload a constant, MEM or pseudo since the move patterns can
7458 handle an arbitrary operand. If OP1 is not a constant, MEM or
7459 pseudo and OP1 is not a valid operand for an add instruction, then
7460 reload OP1.
7461
7462 After reloading one of the operands into the reload register, add
7463 the reload register to the output register.
7464
7465 If there is another way to do this for a specific machine, a
7466 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7467 we emit below. */
7468
7469 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7470
7471 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7472 || (GET_CODE (op1) == REG
7473 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7474 || (code != CODE_FOR_nothing
7475 && ! ((*insn_data[code].operand[2].predicate)
7476 (op1, insn_data[code].operand[2].mode))))
7477 tem = op0, op0 = op1, op1 = tem;
7478
7479 gen_reload (out, op0, opnum, type);
7480
7481 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7482 This fixes a problem on the 32K where the stack pointer cannot
7483 be used as an operand of an add insn. */
7484
7485 if (rtx_equal_p (op0, op1))
7486 op1 = out;
7487
7488 insn = emit_insn (gen_add2_insn (out, op1));
7489
7490 /* If that failed, copy the address register to the reload register.
7491 Then add the constant to the reload register. */
7492
7493 code = recog_memoized (insn);
7494
7495 if (code >= 0)
7496 {
7497 extract_insn (insn);
7498 /* We want constrain operands to treat this insn strictly in
7499 its validity determination, i.e., the way it would after reload
7500 has completed. */
7501 if (constrain_operands (1))
7502 {
7503 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7504 REG_NOTES (insn)
7505 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7506 return insn;
7507 }
7508 }
7509
7510 delete_insns_since (last);
7511
7512 gen_reload (out, op1, opnum, type);
7513 insn = emit_insn (gen_add2_insn (out, op0));
7514 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7515 }
7516
7517#ifdef SECONDARY_MEMORY_NEEDED
7518 /* If we need a memory location to do the move, do it that way. */
7519 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7520 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7521 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7522 REGNO_REG_CLASS (REGNO (out)),
7523 GET_MODE (out)))
7524 {
7525 /* Get the memory to use and rewrite both registers to its mode. */
7526 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7527
7528 if (GET_MODE (loc) != GET_MODE (out))
7529 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7530
7531 if (GET_MODE (loc) != GET_MODE (in))
7532 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7533
7534 gen_reload (loc, in, opnum, type);
7535 gen_reload (out, loc, opnum, type);
7536 }
7537#endif
7538
7539 /* If IN is a simple operand, use gen_move_insn. */
7540 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7541 emit_insn (gen_move_insn (out, in));
7542
7543#ifdef HAVE_reload_load_address
7544 else if (HAVE_reload_load_address)
7545 emit_insn (gen_reload_load_address (out, in));
7546#endif
7547
7548 /* Otherwise, just write (set OUT IN) and hope for the best. */
7549 else
7550 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7551
7552 /* Return the first insn emitted.
7553 We can not just return get_last_insn, because there may have
7554 been multiple instructions emitted. Also note that gen_move_insn may
7555 emit more than one insn itself, so we can not assume that there is one
7556 insn emitted per emit_insn_before call. */
7557
7558 return last ? NEXT_INSN (last) : get_insns ();
7559}
7560
7561/* Delete a previously made output-reload whose result we now believe
7562 is not needed. First we double-check.
7563
7564 INSN is the insn now being processed.
7565 LAST_RELOAD_REG is the hard register number for which we want to delete
7566 the last output reload.
7567 J is the reload-number that originally used REG. The caller has made
7568 certain that reload J doesn't use REG any longer for input. */
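/* (For illustration: OUTPUT_RELOAD_INSN is typically an earlier insn that
   stored reload register LAST_RELOAD_REG back into pseudo REG; if REG is
   not referenced between that store and INSN except as accounted for by
   inherited reloads, the store was pointless and can be removed.) */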
7569
7570static void
7571delete_output_reload (insn, j, last_reload_reg)
7572 rtx insn;
7573 int j;
7574 int last_reload_reg;
7575{
7576 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7577 rtx reg = spill_reg_stored_to[last_reload_reg];
7578 int k;
7579 int n_occurrences;
7580 int n_inherited = 0;
7581 rtx i1;
7582 rtx substed;
7583
7584 /* Get the raw pseudo-register referred to. */
7585
7586 while (GET_CODE (reg) == SUBREG)
7587 reg = SUBREG_REG (reg);
7588 substed = reg_equiv_memory_loc[REGNO (reg)];
7589
7590 /* This is unsafe if the operand occurs more often in the current
7591 insn than it is inherited. */
7592 for (k = n_reloads - 1; k >= 0; k--)
7593 {
7594 rtx reg2 = rld[k].in;
7595 if (! reg2)
7596 continue;
7597 if (GET_CODE (reg2) == MEM || reload_override_in[k])
7598 reg2 = rld[k].in_reg;
7599#ifdef AUTO_INC_DEC
7600 if (rld[k].out && ! rld[k].out_reg)
7601 reg2 = XEXP (rld[k].in_reg, 0);
7602#endif
7603 while (GET_CODE (reg2) == SUBREG)
7604 reg2 = SUBREG_REG (reg2);
7605 if (rtx_equal_p (reg2, reg))
7606 {
7607 if (reload_inherited[k] || reload_override_in[k] || k == j)
7608 {
7609 n_inherited++;
7610 reg2 = rld[k].out_reg;
7611 if (! reg2)
7612 continue;
7613 while (GET_CODE (reg2) == SUBREG)
7614 reg2 = XEXP (reg2, 0);
7615 if (rtx_equal_p (reg2, reg))
7616 n_inherited++;
7617 }
7618 else
7619 return;
7620 }
7621 }
7622 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
7623 if (substed)
7624 n_occurrences += count_occurrences (PATTERN (insn),
7625 eliminate_regs (substed, 0,
7626 NULL_RTX), 0);
7627 if (n_occurrences > n_inherited)
7628 return;
7629
7630 /* If the pseudo-reg we are reloading is no longer referenced
7631 anywhere between the store into it and here,
7632 and no jumps or labels intervene, then the value can get
7633 here through the reload reg alone.
7634 Otherwise, give up--return. */
7635 for (i1 = NEXT_INSN (output_reload_insn);
7636 i1 != insn; i1 = NEXT_INSN (i1))
7637 {
7638 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7639 return;
7640 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7641 && reg_mentioned_p (reg, PATTERN (i1)))
7642 {
7643 /* If this is a USE in front of INSN, we only have to check that
7644 there are no more references than accounted for by inheritance. */
7645 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
7646 {
7647 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7648 i1 = NEXT_INSN (i1);
7649 }
7650 if (n_occurrences <= n_inherited && i1 == insn)
7651 break;
7652 return;
7653 }
7654 }
7655
7656 /* We will be deleting the insn. Remove the spill reg information. */
7657 for (k = HARD_REGNO_NREGS (last_reload_reg, GET_MODE (reg)); k-- > 0; )
7658 {
7659 spill_reg_store[last_reload_reg + k] = 0;
7660 spill_reg_stored_to[last_reload_reg + k] = 0;
7661 }
7662
7663 /* The caller has already checked that REG dies or is set in INSN.
7664 It has also checked that we are optimizing, and thus some
7665 inaccuracies in the debugging information are acceptable.
7666 So we could just delete output_reload_insn. But in some cases
7667 we can improve the debugging information without sacrificing
7668 optimization - maybe even improving the code: See if the pseudo
7669 reg has been completely replaced with reload regs. If so, delete
7670 the store insn and forget we had a stack slot for the pseudo. */
7671 if (rld[j].out != rld[j].in
7672 && REG_N_DEATHS (REGNO (reg)) == 1
7673 && REG_N_SETS (REGNO (reg)) == 1
7674 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7675 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7676 {
7677 rtx i2;
7678
7679 /* We know that it was used only between here and the beginning of
7680 the current basic block. (We also know that the last use before
7681 INSN was the output reload we are thinking of deleting, but never
7682 mind that.) Search that range; see if any ref remains. */
7683 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7684 {
7685 rtx set = single_set (i2);
7686
7687 /* Uses which just store in the pseudo don't count,
7688 since if they are the only uses, they are dead. */
7689 if (set != 0 && SET_DEST (set) == reg)
7690 continue;
7691 if (GET_CODE (i2) == CODE_LABEL
7692 || GET_CODE (i2) == JUMP_INSN)
7693 break;
7694 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7695 && reg_mentioned_p (reg, PATTERN (i2)))
7696 {
7697 /* Some other ref remains; just delete the output reload we
7698 know to be dead. */
7699 delete_address_reloads (output_reload_insn, insn);
7700 delete_insn (output_reload_insn);
7701 return;
7702 }
7703 }
7704
7705 /* Delete the now-dead stores into this pseudo. Note that this
7706 loop also takes care of deleting output_reload_insn. */
7707 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7708 {
7709 rtx set = single_set (i2);
7710
7711 if (set != 0 && SET_DEST (set) == reg)
7712 {
7713 delete_address_reloads (i2, insn);
7714 delete_insn (i2);
7715 }
7716 if (GET_CODE (i2) == CODE_LABEL
7717 || GET_CODE (i2) == JUMP_INSN)
7718 break;
7719 }
7720
7721 /* For the debugging info, say the pseudo lives in this reload reg. */
7722 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
7723 alter_reg (REGNO (reg), -1);
7724 }
7725 else
7726 {
7727 delete_address_reloads (output_reload_insn, insn);
7728 delete_insn (output_reload_insn);
7729 }
7730}
7731
7732/* We are going to delete DEAD_INSN. Recursively delete loads of
7733 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7734 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
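/* (For instance, if DEAD_INSN stored through a reloaded post_increment
   address, the add that bumped the reloaded address register just before
   it and the compensating subtract just after it can be deleted as well;
   that is what the prev/next check below looks for.) */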
7735static void
7736delete_address_reloads (dead_insn, current_insn)
7737 rtx dead_insn, current_insn;
7738{
7739 rtx set = single_set (dead_insn);
7740 rtx set2, dst, prev, next;
7741 if (set)
7742 {
7743 rtx dst = SET_DEST (set);
7744 if (GET_CODE (dst) == MEM)
7745 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7746 }
7747 /* If we deleted the store from a reloaded post_{in,de}c expression,
7748 we can delete the matching adds. */
7749 prev = PREV_INSN (dead_insn);
7750 next = NEXT_INSN (dead_insn);
7751 if (! prev || ! next)
7752 return;
7753 set = single_set (next);
7754 set2 = single_set (prev);
7755 if (! set || ! set2
7756 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7757 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7758 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7759 return;
7760 dst = SET_DEST (set);
7761 if (! rtx_equal_p (dst, SET_DEST (set2))
7762 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7763 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7764 || (INTVAL (XEXP (SET_SRC (set), 1))
7765 != -INTVAL (XEXP (SET_SRC (set2), 1))))
7766 return;
7767 delete_related_insns (prev);
7768 delete_related_insns (next);
7769}
7770
7771/* Subfunction of delete_address_reloads: process registers found in X. */
7772static void
7773delete_address_reloads_1 (dead_insn, x, current_insn)
7774 rtx dead_insn, x, current_insn;
7775{
7776 rtx prev, set, dst, i2;
7777 int i, j;
7778 enum rtx_code code = GET_CODE (x);
7779
7780 if (code != REG)
7781 {
7782 const char *fmt = GET_RTX_FORMAT (code);
7783 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7784 {
7785 if (fmt[i] == 'e')
7786 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
7787 else if (fmt[i] == 'E')
7788 {
7789 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7790 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
7791 current_insn);
7792 }
7793 }
7794 return;
7795 }
7796
7797 if (spill_reg_order[REGNO (x)] < 0)
7798 return;
7799
7800 /* Scan backwards for the insn that sets x. This might be quite far back
7801 due to inheritance. */
7802 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
7803 {
7804 code = GET_CODE (prev);
7805 if (code == CODE_LABEL || code == JUMP_INSN)
7806 return;
7807 if (GET_RTX_CLASS (code) != 'i')
7808 continue;
7809 if (reg_set_p (x, PATTERN (prev)))
7810 break;
7811 if (reg_referenced_p (x, PATTERN (prev)))
7812 return;
7813 }
7814 if (! prev || INSN_UID (prev) < reload_first_uid)
7815 return;
7816 /* Check that PREV only sets the reload register. */
7817 set = single_set (prev);
7818 if (! set)
7819 return;
7820 dst = SET_DEST (set);
7821 if (GET_CODE (dst) != REG
7822 || ! rtx_equal_p (dst, x))
7823 return;
7824 if (! reg_set_p (dst, PATTERN (dead_insn)))
7825 {
7826 /* Check if DST was used in a later insn -
7827 it might have been inherited. */
7828 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
7829 {
7830 if (GET_CODE (i2) == CODE_LABEL)
7831 break;
7832 if (! INSN_P (i2))
7833 continue;
7834 if (reg_referenced_p (dst, PATTERN (i2)))
7835 {
7836 /* If there is a reference to the register in the current insn,
7837 it might be loaded in a non-inherited reload. If no other
7838 reload uses it, that means the register is set before
7839 referenced. */
7840 if (i2 == current_insn)
7841 {
7842 for (j = n_reloads - 1; j >= 0; j--)
7843 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7844 || reload_override_in[j] == dst)
7845 return;
7846 for (j = n_reloads - 1; j >= 0; j--)
7847 if (rld[j].in && rld[j].reg_rtx == dst)
7848 break;
7849 if (j >= 0)
7850 break;
7851 }
7852 return;
7853 }
7854 if (GET_CODE (i2) == JUMP_INSN)
7855 break;
7856 /* If DST is still live at CURRENT_INSN, check if it is used for
7857 any reload. Note that even if CURRENT_INSN sets DST, we still
7858 have to check the reloads. */
7859 if (i2 == current_insn)
7860 {
7861 for (j = n_reloads - 1; j >= 0; j--)
7862 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7863 || reload_override_in[j] == dst)
7864 return;
7865 /* ??? We can't finish the loop here, because dst might be
7866 allocated to a pseudo in this block if no reload in this
7867 block needs any of the classes containing DST - see
7868 spill_hard_reg. There is no easy way to tell this, so we
7869 have to scan till the end of the basic block. */
7870 }
7871 if (reg_set_p (dst, PATTERN (i2)))
7872 break;
7873 }
7874 }
7875 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
7876 reg_reloaded_contents[REGNO (dst)] = -1;
7877 delete_insn (prev);
7878}
7879
7880/* Output reload-insns to reload VALUE into RELOADREG.
7881 VALUE is an autoincrement or autodecrement RTX whose operand
7882 is a register or memory location;
7883 so reloading involves incrementing that location.
7884 IN is either identical to VALUE, or some cheaper place to reload from.
7885
7886 INC_AMOUNT is the number to increment or decrement by (always positive).
7887 This cannot be deduced from VALUE.
7888
7889 Return the instruction that stores into RELOADREG. */
7890
7891static rtx
7892inc_for_reload (reloadreg, in, value, inc_amount)
7893 rtx reloadreg;
7894 rtx in, value;
7895 int inc_amount;
7896{
7897 /* REG or MEM to be copied and incremented. */
7898 rtx incloc = XEXP (value, 0);
7899 /* Nonzero if increment after copying. */
7900 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7901 rtx last;
7902 rtx inc;
7903 rtx add_insn;
7904 int code;
7905 rtx store;
7906 rtx real_in = in == value ? XEXP (in, 0) : in;
7907
7908 /* No hard register is equivalent to this register after
7909 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7910 we could inc/dec that register as well (maybe even using it for
7911 the source), but I'm not sure it's worth worrying about. */
7912 if (GET_CODE (incloc) == REG)
7913 reg_last_reload_reg[REGNO (incloc)] = 0;
7914
7915 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7916 inc_amount = -inc_amount;
7917
7918 inc = GEN_INT (inc_amount);
7919
7920 /* If this is post-increment, first copy the location to the reload reg. */
7921 if (post && real_in != reloadreg)
7922 emit_insn (gen_move_insn (reloadreg, real_in));
7923
7924 if (in == value)
7925 {
7926 /* See if we can directly increment INCLOC. Use a method similar to
7927 that in gen_reload. */
7928
7929 last = get_last_insn ();
7930 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7931 gen_rtx_PLUS (GET_MODE (incloc),
7932 incloc, inc)));
7933
7934 code = recog_memoized (add_insn);
7935 if (code >= 0)
7936 {
7937 extract_insn (add_insn);
7938 if (constrain_operands (1))
7939 {
7940 /* If this is a pre-increment and we have incremented the value
7941 where it lives, copy the incremented value to RELOADREG to
7942 be used as an address. */
7943
7944 if (! post)
7945 emit_insn (gen_move_insn (reloadreg, incloc));
7946
7947 return add_insn;
7948 }
7949 }
7950 delete_insns_since (last);
7951 }
7952
7953 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7954 The way we do this depends on whether this is pre- or post-increment.
7955 For pre-increment, copy INCLOC to the reload register, increment it
7956 there, then save back. */
7957
7958 if (! post)
7959 {
7960 if (in != reloadreg)
7961 emit_insn (gen_move_insn (reloadreg, real_in));
7962 emit_insn (gen_add2_insn (reloadreg, inc));
7963 store = emit_insn (gen_move_insn (incloc, reloadreg));
7964 }
7965 else
7966 {
7967 /* Postincrement.
7968 Because this might be a jump insn or a compare, and because RELOADREG
7969 may not be available after the insn in an input reload, we must do
7970 the incrementation before the insn being reloaded for.
7971
7972 We have already copied IN to RELOADREG. Increment the copy in
7973 RELOADREG, save that back, then decrement RELOADREG so it has
7974 the original value. */
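      /* Illustrative sequence for (post_inc:SI (reg:SI A)) with INC_AMOUNT 4,
	 assuming RELOADREG already holds the original value of A:
	 RELOADREG += 4; A = RELOADREG; RELOADREG -= 4, leaving RELOADREG
	 with the pre-increment value to be used as the address while A has
	 been bumped.  */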
7975
7976 emit_insn (gen_add2_insn (reloadreg, inc));
7977 store = emit_insn (gen_move_insn (incloc, reloadreg));
7978 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7979 }
7980
7981 return store;
7982}
7983
7984/* Return 1 if we are certain that the constraint-string STRING allows
7985 the hard register REG. Return 0 if we can't be sure of this. */
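/* (Example: with REG a general register, a constraint string like "r,g"
   makes every alternative accept it, so we return 1; with "r,m" we cannot
   prove that the second alternative accepts REG, so we return 0.) */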
7986
7987static int
7988constraint_accepts_reg_p (string, reg)
7989 const char *string;
7990 rtx reg;
7991{
7992 int value = 0;
7993 int regno = true_regnum (reg);
7994 int c;
7995
7996 /* Initialize for first alternative. */
7997 value = 0;
7998 /* Check that each alternative contains `g' or `r'. */
7999 while (1)
8000 switch (c = *string++)
8001 {
8002 case 0:
8003 /* If an alternative lacks `g' or `r', we lose. */
8004 return value;
8005 case ',':
8006 /* If an alternative lacks `g' or `r', we lose. */
8007 if (value == 0)
8008 return 0;
8009 /* Initialize for next alternative. */
8010 value = 0;
8011 break;
8012 case 'g':
8013 case 'r':
8014 /* Any general reg wins for this alternative. */
8015 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8016 value = 1;
8017 break;
8018 default:
8019 /* Any reg in specified class wins for this alternative. */
8020 {
8021 enum reg_class class = REG_CLASS_FROM_LETTER (c);
8022
8023 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
8024 value = 1;
8025 }
8026 }
8027}
8028
8029/* INSN is a no-op; delete it.
8030 If this sets the return value of the function, we must keep a USE around,
8031 in case this is in a different basic block than the final USE. Otherwise,
8032 we could lose important register liveness information on
8033 SMALL_REGISTER_CLASSES machines, where return registers might be used as
8034 spills: subsequent passes assume that spill registers are dead at the end
8035 of a basic block.
8036 VALUE must be the return value in such a case, NULL otherwise. */
8037static void
8038reload_cse_delete_noop_set (insn, value)
8039 rtx insn, value;
8040{
8041 bool purge = BLOCK_FOR_INSN (insn)->end == insn;
8042 if (value)
8043 {
8044 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8045 INSN_CODE (insn) = -1;
8046 REG_NOTES (insn) = NULL_RTX;
8047 }
8048 else
8049 delete_insn (insn);
8050 if (purge)
8051 purge_dead_edges (BLOCK_FOR_INSN (insn));
8052}
8053
8054/* See whether a single set SET is a noop. */
8055static int
8056reload_cse_noop_set_p (set)
8057 rtx set;
8058{
8059 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
8060}
8061
8062/* Try to simplify INSN. */
8063static void
8064reload_cse_simplify (insn)
8065 rtx insn;
8066{
8067 rtx body = PATTERN (insn);
8068
8069 if (GET_CODE (body) == SET)
8070 {
8071 int count = 0;
8072
8073 /* Simplify even if we may think it is a no-op.
8074 We may think a memory load of a value smaller than WORD_SIZE
8075 is redundant because we haven't taken into account possible
8076 implicit extension. reload_cse_simplify_set() will bring
8077 this out, so it's safer to simplify before we delete. */
8078 count += reload_cse_simplify_set (body, insn);
8079
8080 if (!count && reload_cse_noop_set_p (body))
8081 {
8082 rtx value = SET_DEST (body);
8083 if (! REG_FUNCTION_VALUE_P (SET_DEST (body)))
8084 value = 0;
8085 reload_cse_delete_noop_set (insn, value);
8086 return;
8087 }
8088
8089 if (count > 0)
8090 apply_change_group ();
8091 else
8092 reload_cse_simplify_operands (insn);
8093 }
8094 else if (GET_CODE (body) == PARALLEL)
8095 {
8096 int i;
8097 int count = 0;
8098 rtx value = NULL_RTX;
8099
8100 /* If every action in a PARALLEL is a noop, we can delete
8101 the entire PARALLEL. */
8102 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8103 {
8104 rtx part = XVECEXP (body, 0, i);
8105 if (GET_CODE (part) == SET)
8106 {
8107 if (! reload_cse_noop_set_p (part))
8108 break;
8109 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8110 {
8111 if (value)
8112 break;
8113 value = SET_DEST (part);
8114 }
8115 }
8116 else if (GET_CODE (part) != CLOBBER)
8117 break;
8118 }
8119
8120 if (i < 0)
8121 {
8122 reload_cse_delete_noop_set (insn, value);
8123 /* We're done with this insn. */
8124 return;
8125 }
8126
8127 /* It's not a no-op, but we can try to simplify it. */
8128 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8129 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8130 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8131
8132 if (count > 0)
8133 apply_change_group ();
8134 else
8135 reload_cse_simplify_operands (insn);
8136 }
8137}
8138
8139/* Do a very simple CSE pass over the hard registers.
8140
8141 This function detects no-op moves where we happened to assign two
8142 different pseudo-registers to the same hard register, and then
8143 copied one to the other. Reload will generate a useless
8144 instruction copying a register to itself.
8145
8146 This function also detects cases where we load a value from memory
8147 into two different registers, and (if memory is more expensive than
8148 registers) changes it to simply copy the first register into the
8149 second register.
8150
8151 Another optimization is performed that scans the operands of each
8152 instruction to see whether the value is already available in a
8153 hard register. It then replaces the operand with the hard register
8154 if possible, much like an optional reload would. */
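/* (For instance, reload may leave behind (set (reg:SI 3) (reg:SI 3)) when
   two pseudos landed in the same hard register; this pass deletes such
   no-op sets, and may turn a second load of the same memory location into
   a copy from the register that already holds the value when that is
   cheaper.) */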
8155
8156static void
8157reload_cse_regs_1 (first)
8158 rtx first;
8159{
8160 rtx insn;
8161
8162 cselib_init ();
8163 init_alias_analysis ();
8164
8165 for (insn = first; insn; insn = NEXT_INSN (insn))
8166 {
8167 if (INSN_P (insn))
8168 reload_cse_simplify (insn);
8169
8170 cselib_process_insn (insn);
8171 }
8172
8173 /* Clean up. */
8174 end_alias_analysis ();
8175 cselib_finish ();
8176}
8177
8178/* Call cse / combine like post-reload optimization phases.
8179 FIRST is the first instruction. */
8180void
8181reload_cse_regs (first)
8182 rtx first;
8183{
8184 reload_cse_regs_1 (first);
8185 reload_combine ();
8186 reload_cse_move2add (first);
8187 if (flag_expensive_optimizations)
8188 reload_cse_regs_1 (first);
8189}
8190
8191/* Try to simplify a single SET instruction. SET is the set pattern.
8192 INSN is the instruction it came from.
8193 This function only handles one case: if we set a register to a value
8194 which is not a register, we try to find that value in some other register
8195 and change the set into a register copy. */
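/* (Illustrative case: for (set (reg:SI 3) (mem:SI X)), if cselib records
   that some hard register currently holds the same value as (mem:SI X)
   and a register-to-register copy is no more expensive, the source is
   replaced by that register.) */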
8196
8197static int
8198reload_cse_simplify_set (set, insn)
8199 rtx set;
8200 rtx insn;
8201{
8202 int did_change = 0;
8203 int dreg;
8204 rtx src;
8205 enum reg_class dclass;
8206 int old_cost;
8207 cselib_val *val;
8208 struct elt_loc_list *l;
8209#ifdef LOAD_EXTEND_OP
8210 enum rtx_code extend_op = NIL;
8211#endif
8212
8213 dreg = true_regnum (SET_DEST (set));
8214 if (dreg < 0)
8215 return 0;
8216
8217 src = SET_SRC (set);
8218 if (side_effects_p (src) || true_regnum (src) >= 0)
8219 return 0;
8220
8221 dclass = REGNO_REG_CLASS (dreg);
8222
8223#ifdef LOAD_EXTEND_OP
8224 /* When replacing a memory with a register, we need to honor assumptions
8225 that combine made wrt the contents of sign bits. We'll do this by
8226 generating an extend instruction instead of a reg->reg copy. Thus
8227 the destination must be a register that we can widen. */
8228 if (GET_CODE (src) == MEM
8229 && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
8230 && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != NIL
8231 && GET_CODE (SET_DEST (set)) != REG)
8232 return 0;
8233#endif
8234
8235 /* If memory loads are cheaper than register copies, don't change them. */
8236 if (GET_CODE (src) == MEM)
8237 old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
8238 else if (CONSTANT_P (src))
8239 old_cost = rtx_cost (src, SET);
8240 else if (GET_CODE (src) == REG)
8241 old_cost = REGISTER_MOVE_COST (GET_MODE (src),
8242 REGNO_REG_CLASS (REGNO (src)), dclass);
8243 else
8244 /* ??? */
8245 old_cost = rtx_cost (src, SET);
8246
8247 val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
8248 if (! val)
8249 return 0;
8250 for (l = val->locs; l; l = l->next)
8251 {
8252 rtx this_rtx = l->loc;
8253 int this_cost;
8254
8255 if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
8256 {
8257#ifdef LOAD_EXTEND_OP
8258 if (extend_op != NIL)
8259 {
8260 HOST_WIDE_INT this_val;
8261
8262 /* ??? I'm lazy and don't wish to handle CONST_DOUBLE. Other
8263 constants, such as SYMBOL_REF, cannot be extended. */
8264 if (GET_CODE (this_rtx) != CONST_INT)
8265 continue;
8266
8267 this_val = INTVAL (this_rtx);
8268 switch (extend_op)
8269 {
8270 case ZERO_EXTEND:
8271 this_val &= GET_MODE_MASK (GET_MODE (src));
8272 break;
8273 case SIGN_EXTEND:
8274 /* ??? In theory we're already extended. */
8275 if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
8276 break;
8277 default:
8278 abort ();
8279 }
8280 this_rtx = GEN_INT (this_val);
8281 }
8282#endif
8283 this_cost = rtx_cost (this_rtx, SET);
8284 }
8285 else if (GET_CODE (this_rtx) == REG)
8286 {
8287#ifdef LOAD_EXTEND_OP
8288 if (extend_op != NIL)
8289 {
8290 this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
8291 this_cost = rtx_cost (this_rtx, SET);
8292 }
8293 else
8294#endif
8295 this_cost = REGISTER_MOVE_COST (GET_MODE (this_rtx),
8296 REGNO_REG_CLASS (REGNO (this_rtx)),
8297 dclass);
8298 }
8299 else
8300 continue;
8301
8302 /* If equal costs, prefer registers over anything else. That
8303 tends to lead to smaller instructions on some machines. */
8304 if (this_cost < old_cost
8305 || (this_cost == old_cost
8306 && GET_CODE (this_rtx) == REG
8307 && GET_CODE (SET_SRC (set)) != REG))
8308 {
8309#ifdef LOAD_EXTEND_OP
8310 if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
8311 && extend_op != NIL)
8312 {
8313 rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
8314 ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
8315 validate_change (insn, &SET_DEST (set), wide_dest, 1);
8316 }
8317#endif
8318
8319 validate_change (insn, &SET_SRC (set), copy_rtx (this_rtx), 1);
8320 old_cost = this_cost, did_change = 1;
8321 }
8322 }
8323
8324 return did_change;
8325}
8326
8327/* Try to replace operands in INSN with equivalent values that are already
8328 in registers. This can be viewed as optional reloading.
8329
8330 For each non-register operand in the insn, see if any hard regs are
8331 known to be equivalent to that operand. Record the alternatives which
8332 can accept these hard registers. Among all alternatives, select the
8333 ones which are better or equal to the one currently matching, where
8334 "better" is in terms of '?' and '!' constraints. Among the remaining
8335 alternatives, select the one which replaces most operands with
8336 hard registers. */
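/* (Illustrative case: a memory or constant operand that cselib knows to
   be live in some hard register may be replaced by that register,
   provided an alternative at least as good as the current one accepts
   it.) */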
8337
8338static int
8339reload_cse_simplify_operands (insn)
8340 rtx insn;
8341{
8342 int i, j;
8343
8344 /* For each operand, all registers that are equivalent to it. */
8345 HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
8346
8347 const char *constraints[MAX_RECOG_OPERANDS];
8348
8349 /* Vector recording how bad an alternative is. */
8350 int *alternative_reject;
8351 /* Vector recording how many registers can be introduced by choosing
8352 this alternative. */
8353 int *alternative_nregs;
8354 /* Array of vectors recording, for each operand and each alternative,
8355 which hard register to substitute, or -1 if the operand should be
8356 left as it is. */
8357 int *op_alt_regno[MAX_RECOG_OPERANDS];
8358 /* Array of alternatives, sorted in order of decreasing desirability. */
8359 int *alternative_order;
8360 rtx reg = gen_rtx_REG (VOIDmode, -1);
8361
8362 extract_insn (insn);
8363
8364 if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
8365 return 0;
8366
8367 /* Figure out which alternative currently matches. */
8368 if (! constrain_operands (1))
8369 fatal_insn_not_found (insn);
8370
8371 alternative_reject = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8372 alternative_nregs = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8373 alternative_order = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8374 memset ((char *) alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
8375 memset ((char *) alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
8376
8377 /* For each operand, find out which regs are equivalent. */
8378 for (i = 0; i < recog_data.n_operands; i++)
8379 {
8380 cselib_val *v;
8381 struct elt_loc_list *l;
8382
8383 CLEAR_HARD_REG_SET (equiv_regs[i]);
8384
8385 /* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
8386 right, so avoid the problem here. Likewise if we have a constant
8387 and the insn pattern doesn't tell us the mode we need. */
8388 if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
8389 || (CONSTANT_P (recog_data.operand[i])
8390 && recog_data.operand_mode[i] == VOIDmode))
8391 continue;
8392
8393 v = cselib_lookup (recog_data.operand[i], recog_data.operand_mode[i], 0);
8394 if (! v)
8395 continue;
8396
8397 for (l = v->locs; l; l = l->next)
8398 if (GET_CODE (l->loc) == REG)
8399 SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
8400 }
8401
8402 for (i = 0; i < recog_data.n_operands; i++)
8403 {
8404 enum machine_mode mode;
8405 int regno;
8406 const char *p;
8407
8408 op_alt_regno[i] = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8409 for (j = 0; j < recog_data.n_alternatives; j++)
8410 op_alt_regno[i][j] = -1;
8411
8412 p = constraints[i] = recog_data.constraints[i];
8413 mode = recog_data.operand_mode[i];
8414
8415 /* Add the reject values for each alternative given by the constraints
8416 for this operand. */
8417 j = 0;
8418 while (*p != '\0')
8419 {
8420 char c = *p++;
8421 if (c == ',')
8422 j++;
8423 else if (c == '?')
8424 alternative_reject[j] += 3;
8425 else if (c == '!')
8426 alternative_reject[j] += 300;
8427 }
8428
8429 /* We won't change operands which are already registers. We
8430 also don't want to modify output operands. */
8431 regno = true_regnum (recog_data.operand[i]);
8432 if (regno >= 0
8433 || constraints[i][0] == '='
8434 || constraints[i][0] == '+')
8435 continue;
8436
8437 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8438 {
8439 int class = (int) NO_REGS;
8440
8441 if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
8442 continue;
8443
8444 REGNO (reg) = regno;
8445 PUT_MODE (reg, mode);
8446
8447 /* We found a register equal to this operand. Now look for all
8448 alternatives that can accept this register and have not been
8449 assigned a register they can use yet. */
8450 j = 0;
8451 p = constraints[i];
8452 for (;;)
8453 {
8454 char c = *p++;
8455
8456 switch (c)
8457 {
8458 case '=': case '+': case '?':
8459 case '#': case '&': case '!':
8460 case '*': case '%':
8461 case '0': case '1': case '2': case '3': case '4':
8462 case '5': case '6': case '7': case '8': case '9':
8463 case 'm': case '<': case '>': case 'V': case 'o':
8464 case 'E': case 'F': case 'G': case 'H':
8465 case 's': case 'i': case 'n':
8466 case 'I': case 'J': case 'K': case 'L':
8467 case 'M': case 'N': case 'O': case 'P':
8468 case 'p': case 'X':
8469 /* These don't say anything we care about. */
8470 break;
8471
8472 case 'g': case 'r':
8473 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8474 break;
8475
8476 default:
8477 class
8478 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
8479 break;
8480
8481 case ',': case '\0':
8482 /* See if REGNO fits this alternative, and set it up as the
8483 replacement register if we don't have one for this
8484 alternative yet and the operand being replaced is not
8485 a cheap CONST_INT. */
8486 if (op_alt_regno[i][j] == -1
8487 && reg_fits_class_p (reg, class, 0, mode)
8488 && (GET_CODE (recog_data.operand[i]) != CONST_INT
8489 || (rtx_cost (recog_data.operand[i], SET)
8490 > rtx_cost (reg, SET))))
8491 {
8492 alternative_nregs[j]++;
8493 op_alt_regno[i][j] = regno;
8494 }
8495 j++;
8496 break;
8497 }
8498
8499 if (c == '\0')
8500 break;
8501 }
8502 }
8503 }
8504
8505 /* Record all alternatives which are better or equal to the currently
8506 matching one in the alternative_order array. */
8507 for (i = j = 0; i < recog_data.n_alternatives; i++)
8508 if (alternative_reject[i] <= alternative_reject[which_alternative])
8509 alternative_order[j++] = i;
8510 recog_data.n_alternatives = j;
8511
8512 /* Sort it. Given a small number of alternatives, a dumb algorithm
8513 won't hurt too much. */
8514 for (i = 0; i < recog_data.n_alternatives - 1; i++)
8515 {
8516 int best = i;
8517 int best_reject = alternative_reject[alternative_order[i]];
8518 int best_nregs = alternative_nregs[alternative_order[i]];
8519 int tmp;
8520
8521 for (j = i + 1; j < recog_data.n_alternatives; j++)
8522 {
8523 int this_reject = alternative_reject[alternative_order[j]];
8524 int this_nregs = alternative_nregs[alternative_order[j]];
8525
8526 if (this_reject < best_reject
8527	      || (this_reject == best_reject && this_nregs > best_nregs))
8528 {
8529 best = j;
8530 best_reject = this_reject;
8531 best_nregs = this_nregs;
8532 }
8533 }
8534
8535 tmp = alternative_order[best];
8536 alternative_order[best] = alternative_order[i];
8537 alternative_order[i] = tmp;
8538 }
8539
8540 /* Substitute the operands as determined by op_alt_regno for the best
8541 alternative. */
8542 j = alternative_order[0];
8543
8544 for (i = 0; i < recog_data.n_operands; i++)
8545 {
8546 enum machine_mode mode = recog_data.operand_mode[i];
8547 if (op_alt_regno[i][j] == -1)
8548 continue;
8549
8550 validate_change (insn, recog_data.operand_loc[i],
8551 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
8552 }
8553
8554 for (i = recog_data.n_dups - 1; i >= 0; i--)
8555 {
8556 int op = recog_data.dup_num[i];
8557 enum machine_mode mode = recog_data.operand_mode[op];
8558
8559 if (op_alt_regno[op][j] == -1)
8560 continue;
8561
8562 validate_change (insn, recog_data.dup_loc[i],
8563 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
8564 }
8565
8566 return apply_change_group ();
8567}
8568
8569/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
8570 addressing now.
8571   This code might also be useful when reload gave up on reg+reg addressing
8572 because of clashes between the return register and INDEX_REG_CLASS. */
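/* As a sketch of the transformation (register numbers are hypothetical),
   on a target that has a reg+reg addressing mode, a sequence like

	(set (reg:SI 2) (const_int 64))
	(set (reg:SI 2) (plus:SI (reg:SI 2) (reg:SI 3)))
	... (mem:SI (reg:SI 2)) ...

   can be rewritten, given a free index register (reg:SI 4), as

	(set (reg:SI 4) (const_int 64))
	... (mem:SI (plus:SI (reg:SI 4) (reg:SI 3))) ...

   See the more detailed illustration inside reload_combine below.  */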
8573
8574/* The maximum number of uses of a register we can keep track of to
8575 replace them with reg+reg addressing. */
8576#define RELOAD_COMBINE_MAX_USES 6
8577
8578/* INSN is the insn where a register has been used, and USEP points to the
8579 location of the register within the rtl. */
8580struct reg_use { rtx insn, *usep; };
8581
8582/* If the register is used in some unknown fashion, USE_INDEX is negative.
8583 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
8584 indicates where it becomes live again.
8585 Otherwise, USE_INDEX is the index of the last encountered use of the
8586 register (which is first among these we have seen since we scan backwards),
8587 OFFSET contains the constant offset that is added to the register in
8588 all encountered uses, and USE_RUID indicates the first encountered, i.e.
8589 last, of these uses.
8590 STORE_RUID is always meaningful if we only want to use a value in a
8591 register in a different place: it denotes the next insn in the insn
8592   stream (i.e. the last encountered) that sets or clobbers the register.  */
8593static struct
8594 {
8595 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
8596 int use_index;
8597 rtx offset;
8598 int store_ruid;
8599 int use_ruid;
8600 } reg_state[FIRST_PSEUDO_REGISTER];
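/* For instance (hypothetical registers), if while scanning backwards we
   first see a use ... (mem:SI (reg:SI 5)) ... and then, earlier in the insn
   stream, a use ... (mem:SI (plus:SI (reg:SI 5) (const_int 4))) ..., the
   recorded offset no longer matches, so reg 5 is marked as used in an
   unknown fashion (its use_index becomes -1) and is left alone.  */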
8601
8602/* Reverse linear uid. This is increased in reload_combine while scanning
8603 the instructions from last to first. It is used to set last_label_ruid
8604 and the store_ruid / use_ruid fields in reg_state. */
8605static int reload_combine_ruid;
8606
8607#define LABEL_LIVE(LABEL) \
8608 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
8609
8610static void
8611reload_combine ()
8612{
8613 rtx insn, set;
8614 int first_index_reg = -1;
8615 int last_index_reg = 0;
8616 int i;
8617 unsigned int r;
8618 int last_label_ruid;
8619 int min_labelno, n_labels;
8620 HARD_REG_SET ever_live_at_start, *label_live;
8621
8622 /* If reg+reg can be used in offsetable memory addresses, the main chunk of
8623 reload has already used it where appropriate, so there is no use in
8624 trying to generate it now. */
8625 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
8626 return;
8627
8628 /* To avoid wasting too much time later searching for an index register,
8629 determine the minimum and maximum index register numbers. */
8630 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8631 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
8632 {
8633 if (first_index_reg == -1)
8634 first_index_reg = r;
8635
8636 last_index_reg = r;
8637 }
8638
8639 /* If no index register is available, we can quit now. */
8640 if (first_index_reg == -1)
8641 return;
8642
8643 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
8644 information is a bit fuzzy immediately after reload, but it's
8645 still good enough to determine which registers are live at a jump
8646 destination. */
8647 min_labelno = get_first_label_num ();
8648 n_labels = max_label_num () - min_labelno;
8649 label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
8650 CLEAR_HARD_REG_SET (ever_live_at_start);
8651
8652 for (i = n_basic_blocks - 1; i >= 0; i--)
8653 {
8654 insn = BLOCK_HEAD (i);
8655 if (GET_CODE (insn) == CODE_LABEL)
8656 {
8657 HARD_REG_SET live;
8658
8659 REG_SET_TO_HARD_REG_SET (live,
8660 BASIC_BLOCK (i)->global_live_at_start);
8661 compute_use_by_pseudos (&live,
8662 BASIC_BLOCK (i)->global_live_at_start);
8663 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
8664 IOR_HARD_REG_SET (ever_live_at_start, live);
8665 }
8666 }
8667
8668 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
8669 last_label_ruid = reload_combine_ruid = 0;
8670 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8671 {
8672 reg_state[r].store_ruid = reload_combine_ruid;
8673 if (fixed_regs[r])
8674 reg_state[r].use_index = -1;
8675 else
8676 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8677 }
8678
8679 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
8680 {
8681 rtx note;
8682
8683 /* We cannot do our optimization across labels. Invalidating all the use
8684 information we have would be costly, so we just note where the label
8685 is and then later disable any optimization that would cross it. */
8686 if (GET_CODE (insn) == CODE_LABEL)
8687 last_label_ruid = reload_combine_ruid;
8688 else if (GET_CODE (insn) == BARRIER)
8689 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8690 if (! fixed_regs[r])
8691 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8692
8693 if (! INSN_P (insn))
8694 continue;
8695
8696 reload_combine_ruid++;
8697
8698 /* Look for (set (REGX) (CONST_INT))
8699 (set (REGX) (PLUS (REGX) (REGY)))
8700 ...
8701 ... (MEM (REGX)) ...
8702 and convert it to
8703 (set (REGZ) (CONST_INT))
8704 ...
8705 ... (MEM (PLUS (REGZ) (REGY)))... .
8706
8707 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
8708 and that we know all uses of REGX before it dies. */
8709 set = single_set (insn);
8710 if (set != NULL_RTX
8711 && GET_CODE (SET_DEST (set)) == REG
8712 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
8713 GET_MODE (SET_DEST (set)))
8714 == 1)
8715 && GET_CODE (SET_SRC (set)) == PLUS
8716 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
8717 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
8718 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
8719 {
8720 rtx reg = SET_DEST (set);
8721 rtx plus = SET_SRC (set);
8722 rtx base = XEXP (plus, 1);
8723 rtx prev = prev_nonnote_insn (insn);
8724 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
8725 unsigned int regno = REGNO (reg);
8726 rtx const_reg = NULL_RTX;
8727 rtx reg_sum = NULL_RTX;
8728
8729 /* Now, we need an index register.
8730 We'll set index_reg to this index register, const_reg to the
8731 register that is to be loaded with the constant
8732 (denoted as REGZ in the substitution illustration above),
8733	     and reg_sum to the register-register sum that we want to use to
8734 substitute uses of REG (typically in MEMs) with.
8735 First check REG and BASE for being index registers;
8736 we can use them even if they are not dead. */
8737 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
8738 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8739 REGNO (base)))
8740 {
8741 const_reg = reg;
8742 reg_sum = plus;
8743 }
8744 else
8745 {
8746 /* Otherwise, look for a free index register. Since we have
8747		 checked above that neither REG nor BASE are index registers,
8748 if we find anything at all, it will be different from these
8749 two registers. */
8750 for (i = first_index_reg; i <= last_index_reg; i++)
8751 {
8752 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8753 i)
8754 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
8755 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
8756 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
8757 {
8758 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
8759
8760 const_reg = index_reg;
8761 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
8762 break;
8763 }
8764 }
8765 }
8766
8767 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
8768 (REGY), i.e. BASE, is not clobbered before the last use we'll
8769 create. */
8770 if (prev_set != 0
8771 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
8772 && rtx_equal_p (SET_DEST (prev_set), reg)
8773 && reg_state[regno].use_index >= 0
8774 && (reg_state[REGNO (base)].store_ruid
8775 <= reg_state[regno].use_ruid)
8776 && reg_sum != 0)
8777 {
8778 int i;
8779
8780 /* Change destination register and, if necessary, the
8781 constant value in PREV, the constant loading instruction. */
8782 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
8783 if (reg_state[regno].offset != const0_rtx)
8784 validate_change (prev,
8785 &SET_SRC (prev_set),
8786 GEN_INT (INTVAL (SET_SRC (prev_set))
8787 + INTVAL (reg_state[regno].offset)),
8788 1);
8789
8790 /* Now for every use of REG that we have recorded, replace REG
8791 with REG_SUM. */
8792 for (i = reg_state[regno].use_index;
8793 i < RELOAD_COMBINE_MAX_USES; i++)
8794 validate_change (reg_state[regno].reg_use[i].insn,
8795 reg_state[regno].reg_use[i].usep,
8796 /* Each change must have its own
8797 replacement. */
8798 copy_rtx (reg_sum), 1);
8799
8800 if (apply_change_group ())
8801 {
8802 rtx *np;
8803
8804 /* Delete the reg-reg addition. */
8805 delete_insn (insn);
8806
8807 if (reg_state[regno].offset != const0_rtx)
8808 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
8809 are now invalid. */
8810 for (np = &REG_NOTES (prev); *np;)
8811 {
8812 if (REG_NOTE_KIND (*np) == REG_EQUAL
8813 || REG_NOTE_KIND (*np) == REG_EQUIV)
8814 *np = XEXP (*np, 1);
8815 else
8816 np = &XEXP (*np, 1);
8817 }
8818
8819 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
8820 reg_state[REGNO (const_reg)].store_ruid
8821 = reload_combine_ruid;
8822 continue;
8823 }
8824 }
8825 }
8826
8827 note_stores (PATTERN (insn), reload_combine_note_store, NULL);
8828
8829 if (GET_CODE (insn) == CALL_INSN)
8830 {
8831 rtx link;
8832
8833 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8834 if (call_used_regs[r])
8835 {
8836 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8837 reg_state[r].store_ruid = reload_combine_ruid;
8838 }
8839
8840 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
8841 link = XEXP (link, 1))
8842 {
8843 rtx usage_rtx = XEXP (XEXP (link, 0), 0);
8844 if (GET_CODE (usage_rtx) == REG)
8845 {
8846 unsigned int i;
8847 unsigned int start_reg = REGNO (usage_rtx);
8848 unsigned int num_regs =
8849 HARD_REGNO_NREGS (start_reg, GET_MODE (usage_rtx));
8850 unsigned int end_reg = start_reg + num_regs - 1;
8851 for (i = start_reg; i <= end_reg; i++)
8852 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
8853 {
8854 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
8855 reg_state[i].store_ruid = reload_combine_ruid;
8856 }
8857 else
8858 reg_state[i].use_index = -1;
8859 }
8860 }
8861
8862 }
8863 else if (GET_CODE (insn) == JUMP_INSN
8864 && GET_CODE (PATTERN (insn)) != RETURN)
8865 {
8866	  /* Non-spill registers might be used at the jump destination in
8867 some unknown fashion, so we have to mark the unknown use. */
8868 HARD_REG_SET *live;
8869
8870 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
8871 && JUMP_LABEL (insn))
8872 live = &LABEL_LIVE (JUMP_LABEL (insn));
8873 else
8874 live = &ever_live_at_start;
8875
8876 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
8877 if (TEST_HARD_REG_BIT (*live, i))
8878 reg_state[i].use_index = -1;
8879 }
8880
8881 reload_combine_note_use (&PATTERN (insn), insn);
8882 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
8883 {
8884 if (REG_NOTE_KIND (note) == REG_INC
8885 && GET_CODE (XEXP (note, 0)) == REG)
8886 {
8887 int regno = REGNO (XEXP (note, 0));
8888
8889 reg_state[regno].store_ruid = reload_combine_ruid;
8890 reg_state[regno].use_index = -1;
8891 }
8892 }
8893 }
8894
8895 free (label_live);
8896}
8897
8898/* Check if DST is a register or a subreg of a register; if it is,
8899 update reg_state[regno].store_ruid and reg_state[regno].use_index
8900 accordingly. Called via note_stores from reload_combine. */
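/* For example, a plain (set (reg:SI 1) ...) marks reg 1 as dead above this
   point (use_index becomes RELOAD_COMBINE_MAX_USES), so reload_combine may
   treat it as free, whereas a partial store such as
   (set (strict_low_part (subreg:HI (reg:SI 1) 0)) ...) or a CLOBBER marks
   it as used in an unknown fashion (use_index becomes -1).  The rtl forms
   here are illustrative only.  */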
8901
8902static void
8903reload_combine_note_store (dst, set, data)
8904 rtx dst, set;
8905 void *data ATTRIBUTE_UNUSED;
8906{
8907 int regno = 0;
8908 int i;
8909 enum machine_mode mode = GET_MODE (dst);
8910
8911 if (GET_CODE (dst) == SUBREG)
8912 {
8913 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
8914 GET_MODE (SUBREG_REG (dst)),
8915 SUBREG_BYTE (dst),
8916 GET_MODE (dst));
8917 dst = SUBREG_REG (dst);
8918 }
8919 if (GET_CODE (dst) != REG)
8920 return;
8921 regno += REGNO (dst);
8922
8923 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
8924 careful with registers / register parts that are not full words.
8925
8926 Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */
8927 if (GET_CODE (set) != SET
8928 || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
8929 || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
8930 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
8931 {
8932 for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
8933 {
8934 reg_state[i].use_index = -1;
8935 reg_state[i].store_ruid = reload_combine_ruid;
8936 }
8937 }
8938 else
8939 {
8940 for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
8941 {
8942 reg_state[i].store_ruid = reload_combine_ruid;
8943 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
8944 }
8945 }
8946}
8947
8948/* XP points to a piece of rtl that has to be checked for any uses of
8949 registers.
8950 *XP is the pattern of INSN, or a part of it.
8951 Called from reload_combine, and recursively by itself. */
8952static void
8953reload_combine_note_use (xp, insn)
8954 rtx *xp, insn;
8955{
8956 rtx x = *xp;
8957  enum rtx_code code = GET_CODE (x);
8958 const char *fmt;
8959 int i, j;
8960 rtx offset = const0_rtx; /* For the REG case below. */
8961
8962 switch (code)
8963 {
8964 case SET:
8965 if (GET_CODE (SET_DEST (x)) == REG)
8966 {
8967 reload_combine_note_use (&SET_SRC (x), insn);
8968 return;
8969 }
8970 break;
8971
8972 case USE:
8973 /* If this is the USE of a return value, we can't change it. */
8974 if (GET_CODE (XEXP (x, 0)) == REG && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
8975 {
8976 /* Mark the return register as used in an unknown fashion. */
8977 rtx reg = XEXP (x, 0);
8978 int regno = REGNO (reg);
8979 int nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg));
8980
8981 while (--nregs >= 0)
8982 reg_state[regno + nregs].use_index = -1;
8983 return;
8984 }
8985 break;
8986
8987 case CLOBBER:
8988 if (GET_CODE (SET_DEST (x)) == REG)
8989 {
8990 /* No spurious CLOBBERs of pseudo registers may remain. */
8991 if (REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER)
8992 abort ();
8993 return;
8994 }
8995 break;
8996
8997 case PLUS:
8998 /* We are interested in (plus (reg) (const_int)) . */
8999 if (GET_CODE (XEXP (x, 0)) != REG
9000 || GET_CODE (XEXP (x, 1)) != CONST_INT)
9001 break;
9002 offset = XEXP (x, 1);
9003 x = XEXP (x, 0);
9004 /* Fall through. */
9005 case REG:
9006 {
9007 int regno = REGNO (x);
9008 int use_index;
9009 int nregs;
9010
9011 /* No spurious USEs of pseudo registers may remain. */
9012 if (regno >= FIRST_PSEUDO_REGISTER)
9013 abort ();
9014
9015 nregs = HARD_REGNO_NREGS (regno, GET_MODE (x));
9016
9017 /* We can't substitute into multi-hard-reg uses. */
9018 if (nregs > 1)
9019 {
9020 while (--nregs >= 0)
9021 reg_state[regno + nregs].use_index = -1;
9022 return;
9023 }
9024
9025 /* If this register is already used in some unknown fashion, we
9026 can't do anything.
9027 If we decrement the index from zero to -1, we can't store more
9028 uses, so this register becomes used in an unknown fashion. */
9029 use_index = --reg_state[regno].use_index;
9030 if (use_index < 0)
9031 return;
9032
9033 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9034 {
9035 /* We have found another use for a register that is already
9036 used later. Check if the offsets match; if not, mark the
9037 register as used in an unknown fashion. */
9038 if (! rtx_equal_p (offset, reg_state[regno].offset))
9039 {
9040 reg_state[regno].use_index = -1;
9041 return;
9042 }
9043 }
9044 else
9045 {
9046 /* This is the first use of this register we have seen since we
9047 marked it as dead. */
9048 reg_state[regno].offset = offset;
9049 reg_state[regno].use_ruid = reload_combine_ruid;
9050 }
9051 reg_state[regno].reg_use[use_index].insn = insn;
9052 reg_state[regno].reg_use[use_index].usep = xp;
9053 return;
9054 }
9055
9056 default:
9057 break;
9058 }
9059
9060 /* Recursively process the components of X. */
9061 fmt = GET_RTX_FORMAT (code);
9062 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9063 {
9064 if (fmt[i] == 'e')
9065 reload_combine_note_use (&XEXP (x, i), insn);
9066 else if (fmt[i] == 'E')
9067 {
9068 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9069 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9070 }
9071 }
9072}
9073
9074/* See if we can reduce the cost of a constant by replacing a move
9075 with an add. We track situations in which a register is set to a
9076 constant or to a register plus a constant. */
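/* A minimal sketch (hypothetical register and values): the sequence

	(set (reg:SI 1) (const_int 100))
	...
	(set (reg:SI 1) (const_int 104))

   can be rewritten as

	(set (reg:SI 1) (const_int 100))
	...
	(set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))

   when the add is cheaper than reloading the constant, which matters on
   targets where large constants are expensive to materialize.  */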
9077/* We cannot do our optimization across labels. Invalidating all the
9078 information about register contents we have would be costly, so we
9079 use move2add_last_label_luid to note where the label is and then
9080 later disable any optimization that would cross it.
9081 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9082   reg_set_luid[n] is greater than move2add_last_label_luid.  */
9083static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9084
9085/* If reg_base_reg[n] is negative, register n has been set to
9086 reg_offset[n] in mode reg_mode[n] .
9087 If reg_base_reg[n] is non-negative, register n has been set to the
9088 sum of reg_offset[n] and the value of register reg_base_reg[n]
9089 before reg_set_luid[n], calculated in mode reg_mode[n] . */
9090static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
9091static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9092static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
9093
9094/* move2add_luid is linearly increased while scanning the instructions
9095 from first to last. It is used to set reg_set_luid in
9096 reload_cse_move2add and move2add_note_store. */
9097static int move2add_luid;
9098
9099/* move2add_last_label_luid is set whenever a label is found. Labels
9100 invalidate all previously collected reg_offset data. */
9101static int move2add_last_label_luid;
9102
9103/* Return VALUE truncated to the width of MODE and sign extended, as a
     HOST_WIDE_INT.  */
9104
9105static HOST_WIDE_INT
9106sext_for_mode (mode, value)
9107 enum machine_mode mode;
9108 HOST_WIDE_INT value;
9109{
9110 HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
9111 int width = GET_MODE_BITSIZE (mode);
9112
9113 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
9114 sign extend it. */
9115 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
9116 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
9117 cval |= (HOST_WIDE_INT) -1 << width;
9118
9119 return cval;
9120}
9121
9122/* ??? We don't know how zero / sign extension is handled, hence we
9123 can't go from a narrower to a wider mode. */
9124#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
9125 (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
9126 || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
9127 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
9128 GET_MODE_BITSIZE (INMODE))))
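/* E.g. on a typical target where truncation is a no-op, information about
   a register recorded in SImode can be reused when the destination is
   HImode, but HImode information says nothing about the high part and so
   cannot be used for an SImode destination.  */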
9129
9130static void
9131reload_cse_move2add (first)
9132 rtx first;
9133{
9134 int i;
9135 rtx insn;
9136
9137 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9138 reg_set_luid[i] = 0;
9139
9140 move2add_last_label_luid = 0;
9141 move2add_luid = 2;
9142 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
9143 {
9144 rtx pat, note;
9145
9146 if (GET_CODE (insn) == CODE_LABEL)
9147 {
9148 move2add_last_label_luid = move2add_luid;
9149 /* We're going to increment move2add_luid twice after a
9150 label, so that we can use move2add_last_label_luid + 1 as
9151 the luid for constants. */
9152 move2add_luid++;
9153 continue;
9154 }
9155 if (! INSN_P (insn))
9156 continue;
9157 pat = PATTERN (insn);
9158 /* For simplicity, we only perform this optimization on
9159 straightforward SETs. */
9160 if (GET_CODE (pat) == SET
9161 && GET_CODE (SET_DEST (pat)) == REG)
9162 {
9163 rtx reg = SET_DEST (pat);
9164 int regno = REGNO (reg);
9165 rtx src = SET_SRC (pat);
9166
9167 /* Check if we have valid information on the contents of this
9168 register in the mode of REG. */
9169 if (reg_set_luid[regno] > move2add_last_label_luid
9170 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno]))
9171 {
9172 /* Try to transform (set (REGX) (CONST_INT A))
9173 ...
9174 (set (REGX) (CONST_INT B))
9175 to
9176 (set (REGX) (CONST_INT A))
9177 ...
9178 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9179
9180 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
9181 {
9182 int success = 0;
9183 rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9184 INTVAL (src)
9185 - reg_offset[regno]));
9186 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9187 use (set (reg) (reg)) instead.
9188 We don't delete this insn, nor do we convert it into a
9189 note, to avoid losing register notes or the return
9190		     value flag.  jump2 already knows how to get rid of
9191 no-op moves. */
9192 if (new_src == const0_rtx)
9193 success = validate_change (insn, &SET_SRC (pat), reg, 0);
9194 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
9195 && have_add2_insn (reg, new_src))
9196 success = validate_change (insn, &PATTERN (insn),
9197 gen_add2_insn (reg, new_src), 0);
9198 reg_set_luid[regno] = move2add_luid;
9199 reg_mode[regno] = GET_MODE (reg);
9200 reg_offset[regno] = INTVAL (src);
9201 continue;
9202 }
9203
9204 /* Try to transform (set (REGX) (REGY))
9205 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9206 ...
9207 (set (REGX) (REGY))
9208 (set (REGX) (PLUS (REGX) (CONST_INT B)))
9209 to
9210			      (set (REGX) (REGY))
9211 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9212 ...
9213 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9214 else if (GET_CODE (src) == REG
9215 && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
9216 && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
9217 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
9218 reg_mode[REGNO (src)]))
9219 {
9220 rtx next = next_nonnote_insn (insn);
9221 rtx set = NULL_RTX;
9222 if (next)
9223 set = single_set (next);
9224 if (set
9225 && SET_DEST (set) == reg
9226 && GET_CODE (SET_SRC (set)) == PLUS
9227 && XEXP (SET_SRC (set), 0) == reg
9228 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
9229 {
9230 rtx src3 = XEXP (SET_SRC (set), 1);
9231 HOST_WIDE_INT added_offset = INTVAL (src3);
9232 HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
9233 HOST_WIDE_INT regno_offset = reg_offset[regno];
9234 rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9235 added_offset
9236 + base_offset
9237 - regno_offset));
9238 int success = 0;
9239
9240 if (new_src == const0_rtx)
9241 /* See above why we create (set (reg) (reg)) here. */
9242 success
9243 = validate_change (next, &SET_SRC (set), reg, 0);
9244 else if ((rtx_cost (new_src, PLUS)
9245 < COSTS_N_INSNS (1) + rtx_cost (src3, SET))
9246 && have_add2_insn (reg, new_src))
9247 success
9248 = validate_change (next, &PATTERN (next),
9249 gen_add2_insn (reg, new_src), 0);
9250 if (success)
9251 delete_insn (insn);
9252 insn = next;
9253 reg_mode[regno] = GET_MODE (reg);
9254 reg_offset[regno] = sext_for_mode (GET_MODE (reg),
9255 added_offset
9256 + base_offset);
9257 continue;
9258 }
9259 }
9260 }
9261 }
9262
9263 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9264 {
9265 if (REG_NOTE_KIND (note) == REG_INC
9266 && GET_CODE (XEXP (note, 0)) == REG)
9267 {
9268 /* Reset the information about this register. */
9269 int regno = REGNO (XEXP (note, 0));
9270 if (regno < FIRST_PSEUDO_REGISTER)
9271 reg_set_luid[regno] = 0;
9272 }
9273 }
9274 note_stores (PATTERN (insn), move2add_note_store, NULL);
9275 /* If this is a CALL_INSN, all call used registers are stored with
9276 unknown values. */
9277 if (GET_CODE (insn) == CALL_INSN)
9278 {
9279 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9280 {
9281 if (call_used_regs[i])
9282 /* Reset the information about this register. */
9283 reg_set_luid[i] = 0;
9284 }
9285 }
9286 }
9287}
9288
9289/* SET is a SET or CLOBBER that sets DST.
9290 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
9291 Called from reload_cse_move2add via note_stores. */
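/* For instance (hypothetical registers), after
   (set (reg:SI 1) (plus:SI (reg:SI 2) (const_int 8))) register 1 inherits
   register 2's recorded base register, and its recorded offset becomes 8
   plus register 2's recorded offset, sign extended for SImode.  */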
9292
9293static void
9294move2add_note_store (dst, set, data)
9295 rtx dst, set;
9296 void *data ATTRIBUTE_UNUSED;
9297{
9298 unsigned int regno = 0;
9299 unsigned int i;
9300 enum machine_mode mode = GET_MODE (dst);
9301
9302 if (GET_CODE (dst) == SUBREG)
9303 {
9304 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
9305 GET_MODE (SUBREG_REG (dst)),
9306 SUBREG_BYTE (dst),
9307 GET_MODE (dst));
9308 dst = SUBREG_REG (dst);
9309 }
9310
9311 /* Some targets do argument pushes without adding REG_INC notes. */
9312
9313 if (GET_CODE (dst) == MEM)
9314 {
9315 dst = XEXP (dst, 0);
9316 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
9317 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
9318 reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
9319 return;
9320 }
9321 if (GET_CODE (dst) != REG)
9322 return;
9323
9324 regno += REGNO (dst);
9325
9326 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
9327 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
9328 && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
9329 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
9330 {
9331 rtx src = SET_SRC (set);
9332 rtx base_reg;
9333 HOST_WIDE_INT offset;
9334 int base_regno;
9335 /* This may be different from mode, if SET_DEST (set) is a
9336 SUBREG. */
9337 enum machine_mode dst_mode = GET_MODE (dst);
9338
9339 switch (GET_CODE (src))
9340 {
9341 case PLUS:
9342 if (GET_CODE (XEXP (src, 0)) == REG)
9343 {
9344 base_reg = XEXP (src, 0);
9345
9346 if (GET_CODE (XEXP (src, 1)) == CONST_INT)
9347 offset = INTVAL (XEXP (src, 1));
9348 else if (GET_CODE (XEXP (src, 1)) == REG
9349 && (reg_set_luid[REGNO (XEXP (src, 1))]
9350 > move2add_last_label_luid)
9351 && (MODES_OK_FOR_MOVE2ADD
9352 (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
9353 {
9354 if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
9355 offset = reg_offset[REGNO (XEXP (src, 1))];
9356 /* Maybe the first register is known to be a
9357 constant. */
9358 else if (reg_set_luid[REGNO (base_reg)]
9359 > move2add_last_label_luid
9360 && (MODES_OK_FOR_MOVE2ADD
9361 (dst_mode, reg_mode[REGNO (XEXP (src, 1))]))
9362 && reg_base_reg[REGNO (base_reg)] < 0)
9363 {
9364 offset = reg_offset[REGNO (base_reg)];
9365 base_reg = XEXP (src, 1);
9366 }
9367 else
9368 goto invalidate;
9369 }
9370 else
9371 goto invalidate;
9372
9373 break;
9374 }
9375
9376 goto invalidate;
9377
9378 case REG:
9379 base_reg = src;
9380 offset = 0;
9381 break;
9382
9383 case CONST_INT:
9384 /* Start tracking the register as a constant. */
9385 reg_base_reg[regno] = -1;
9386 reg_offset[regno] = INTVAL (SET_SRC (set));
9387 /* We assign the same luid to all registers set to constants. */
9388 reg_set_luid[regno] = move2add_last_label_luid + 1;
9389 reg_mode[regno] = mode;
9390 return;
9391
9392 default:
9393 invalidate:
9394 /* Invalidate the contents of the register. */
9395 reg_set_luid[regno] = 0;
9396 return;
9397 }
9398
9399 base_regno = REGNO (base_reg);
9400 /* If information about the base register is not valid, set it
9401 up as a new base register, pretending its value is known
9402 starting from the current insn. */
9403 if (reg_set_luid[base_regno] <= move2add_last_label_luid)
9404 {
9405 reg_base_reg[base_regno] = base_regno;
9406 reg_offset[base_regno] = 0;
9407 reg_set_luid[base_regno] = move2add_luid;
9408 reg_mode[base_regno] = mode;
9409 }
9410 else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
9411 reg_mode[base_regno]))
9412 goto invalidate;
9413
9414 reg_mode[regno] = mode;
9415
9416 /* Copy base information from our base register. */
9417 reg_set_luid[regno] = reg_set_luid[base_regno];
9418 reg_base_reg[regno] = reg_base_reg[base_regno];
9419
9420 /* Compute the sum of the offsets or constants. */
9421 reg_offset[regno] = sext_for_mode (dst_mode,
9422 offset
9423 + reg_offset[base_regno]);
9424 }
9425 else
9426 {
9427 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, mode);
9428
9429 for (i = regno; i < endregno; i++)
9430 /* Reset the information about this register. */
9431 reg_set_luid[i] = 0;
9432 }
9433}
9434
9435#ifdef AUTO_INC_DEC
9436static void
9437add_auto_inc_notes (insn, x)
9438 rtx insn;
9439 rtx x;
9440{
9441 enum rtx_code code = GET_CODE (x);
9442 const char *fmt;
9443 int i, j;
9444
9445 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9446 {
9447 REG_NOTES (insn)
9448 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
9449 return;
9450 }
9451
9452 /* Scan all the operand sub-expressions. */
9453 fmt = GET_RTX_FORMAT (code);
9454 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9455 {
9456 if (fmt[i] == 'e')
9457 add_auto_inc_notes (insn, XEXP (x, i));
9458 else if (fmt[i] == 'E')
9459 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9460 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9461 }
9462}
9463#endif
9464
9465/* Copy EH notes from an insn to its reloads. */
9466static void
9467copy_eh_notes (insn, x)
9468 rtx insn;
9469 rtx x;
9470{
9471 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
9472 if (eh_note)
9473 {
9474 for (; x != 0; x = NEXT_INSN (x))
9475 {
9476 if (may_trap_p (PATTERN (x)))
9477 REG_NOTES (x)
9478 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
9479 REG_NOTES (x));
9480 }
9481 }
9482}
9483
9484/* This is used by the reload pass, which emits some instructions after
9485   abnormal calls past the end of the basic block, when in fact they should
9486   be emitted on the edge.  Look for abnormal call edges, search backward
9487   for the proper call and fix the damage.
9488
9489   Instructions that throw exceptions internally are handled similarly.  */
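/* For instance, if reload emitted a spill store after a call that ends a
   basic block with an abnormal call edge, that store now sits past what
   should be the block's last insn.  Below we restore bb->end to the call
   and move the trailing insns onto the fallthru edge, so they are executed
   only on the normal return path.  */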
9490void
9491fixup_abnormal_edges ()
9492{
9493 int i;
9494 bool inserted = false;
9495
9496 for (i = 0; i < n_basic_blocks; i++)
9497 {
9498 basic_block bb = BASIC_BLOCK (i);
9499 edge e;
9500
9501      /* Look for cases we are interested in - calls or instructions causing
9502 exceptions. */
9503 for (e = bb->succ; e; e = e->succ_next)
9504 {
9505 if (e->flags & EDGE_ABNORMAL_CALL)
9506 break;
9507 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
9508 == (EDGE_ABNORMAL | EDGE_EH))
9509 break;
9510 }
9511 if (e && GET_CODE (bb->end) != CALL_INSN && !can_throw_internal (bb->end))
9512 {
9513 rtx insn = bb->end, stop = NEXT_INSN (bb->end);
9514 rtx next;
9515 for (e = bb->succ; e; e = e->succ_next)
9516 if (e->flags & EDGE_FALLTHRU)
9517 break;
9518 /* Get past the new insns generated. Allow notes, as the insns may
9519 be already deleted. */
9520 while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
9521 && !can_throw_internal (insn)
9522 && insn != bb->head)
9523 insn = PREV_INSN (insn);
9524 if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
9525 abort ();
9526 bb->end = insn;
9527 inserted = true;
9528 insn = NEXT_INSN (insn);
9529 while (insn && insn != stop)
9530 {
9531 next = NEXT_INSN (insn);
9532 if (INSN_P (insn))
9533 {
9534 rtx seq;
9535
9536 delete_insn (insn);
9537
9538 /* We're not deleting it, we're moving it. */
9539 INSN_DELETED_P (insn) = 0;
9540
9541 /* Emit a sequence, rather than scarfing the pattern, so
9542 that we don't lose REG_NOTES etc. */
9543 /* ??? Could copy the test from gen_sequence, but don't
9544 think it's worth the bother. */
9545 seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (1, insn));
9546 insert_insn_on_edge (seq, e);
9547 }
9548 insn = next;
9549 }
9550 }
9551 }
9552 if (inserted)
9553 commit_edge_insertions ();
9554}