/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note that CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.
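
   For example, in this hypothetical scan (register and quantity
   numbers are arbitrary):

     (set (reg:SI 100) (mem:SI (reg:SI 99)))   reg 100 gets a new qty, say 7
     (set (reg:SI 101) (reg:SI 100))           REG_QTY (101) = 7, copied
     (set (reg:SI 100) (const_int 9))          reg 100 gets a new qty, say 8

   After the third insn, registers 100 and 101 are no longer considered
   equivalent: REG_QTY (100) == 8 while REG_QTY (101) == 7.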

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the qty_table `mode' must be in the hash table for both registers and
   must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
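
   For instance (an illustrative sequence, not actual compiler output),
   after

     (set (reg:HI 102) (const_int 4))

   the constant 4 is entered in the hash table with mode HImode; a later
   (set (reg:SI 103) (const_int 4)) gives rise to a separate SImode entry
   for the same rtx.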

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match causes the entries to be
   ignored if anyone tries to match them.
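
   As a hypothetical illustration: an expression mentioning register 100
   is entered while REG_TICK (100) == 2, so REG_IN_TABLE (100) is set
   to 2.  A later store into register 100 bumps REG_TICK (100) to 3;
   from then on the stale entry fails to match, and it is physically
   removed only if we try to enter a new reference to register 100 and
   notice the mismatch.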

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
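
/* For instance (a hypothetical entry), when
   (const (plus (symbol_ref "x") (const_int 16))) is put in the table,
   (symbol_ref "x") is entered as well, and the two are linked through
   the `related_value' chain, so an equivalence recorded for either one
   can be found starting from the other.  */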

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
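
/* For example (hypothetical): after a branch that is taken only when
   (gt:SI (reg:SI 100) (const_int 0)) holds, the quantity of register 100
   can record comparison_code == GT, comparison_const == (const_int 0)
   and comparison_qty == -1, so a later identical test against that
   quantity has a known result.  */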

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;
#endif

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_basic_block.  We increment this variable at the beginning of
   the current run of cse_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL; in that case we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
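
/* E.g. (with made-up numbers), for a pseudo (reg:SI 100) whose quantity
   number is 7, HASH reduces to (((unsigned) REG << 7) + 7) & HASH_MASK.
   Since the hash uses the quantity number rather than the register
   number, two pseudos with the same quantity always hash to the same
   bucket.  */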

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P (N)						\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
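
/* For example, a register 5 that has never been assigned a quantity has
   REG_QTY (5) == -6 (the -N - 1 encoding above), so REGNO_QTY_VALID_P (5)
   is false and qty_table is never indexed with a negative value.  */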

static struct table_elt *table[HASH_SIZE];

/* Number of elements in the hash table.  */

static unsigned int table_size;

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
    } *path;
};

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx, rtx);
static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
				    int, int);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   cheap (fixed or frame) registers count 0, other hard registers
   count 2, and pseudos count 1.  If SMALL_REGISTER_CLASSES and any
   non-cheap hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a canonical version of X for use as an address, from the point
   of view that all multiplications are represented as MULT instead of
   the multiply by a power of 2 being represented as ASHIFT.  */

static rtx
canon_for_address (rtx x)
{
  enum rtx_code code;
  enum machine_mode mode;
  rtx new = 0;
  int i;
  const char *fmt;

  if (!x)
    return x;

  code = GET_CODE (x);
  mode = GET_MODE (x);

  switch (code)
    {
    case ASHIFT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = canon_for_address (XEXP (x, 0));
	  new = gen_rtx_MULT (mode, new,
			      gen_int_mode ((HOST_WIDE_INT) 1
					    << INTVAL (XEXP (x, 1)),
					    mode));
	}
      break;
    default:
      break;
    }

  if (new)
    return new;

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = canon_for_address (XEXP (x, i));
	XEXP (x, i) = new;
      }
  return x;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      if (cse_reg_info_table)
	free (cse_reg_info_table);
      cse_reg_info_table = xmalloc (sizeof (struct cse_reg_info)
				    * new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch entries
	 beyond the first NREGS, to avoid touching more virtual memory
	 than necessary.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  table_size = 0;

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE that was not
   previously in any register, and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old));

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   A nonzero return value means that elements of reg_qty have changed,
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is a hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we would get the register in the
		     wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;

  table_size--;
}

/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute X's hash code before calling here.

   If necessary, update the table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = xmalloc (sizeof (struct table_elt));

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at the head of the class: put it after the last
	     element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;
1591
1592  /* If this is a constant being set equivalent to a register or a register
1593     being set equivalent to a constant, note the constant equivalence.
1594
1595     If this is a constant, it cannot be equivalent to a different constant,
1596     and a constant is the only thing that can be cheaper than a register.  So
1597     we know the register is the head of the class (before the constant was
1598     inserted).
1599
1600     If this is a register that is not already known equivalent to a
1601     constant, we must check the entire class.
1602
     If this is a register that is already known equivalent to a
     constant, update the quantity's `const_insn' to show that
     `this_insn' is the latest insn making that quantity equivalent
     to the constant.  */
1606
1607  if (elt->is_const && classp && REG_P (classp->exp)
1608      && !REG_P (x))
1609    {
1610      int exp_q = REG_QTY (REGNO (classp->exp));
1611      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1612
1613      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1614      exp_ent->const_insn = this_insn;
1615    }
1616
1617  else if (REG_P (x)
1618	   && classp
1619	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1620	   && ! elt->is_const)
1621    {
1622      struct table_elt *p;
1623
1624      for (p = classp; p != 0; p = p->next_same_value)
1625	{
1626	  if (p->is_const && !REG_P (p->exp))
1627	    {
1628	      int x_q = REG_QTY (REGNO (x));
1629	      struct qty_table_elem *x_ent = &qty_table[x_q];
1630
1631	      x_ent->const_rtx
1632		= gen_lowpart (GET_MODE (x), p->exp);
1633	      x_ent->const_insn = this_insn;
1634	      break;
1635	    }
1636	}
1637    }
1638
1639  else if (REG_P (x)
1640	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1641	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1642    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1643
1644  /* If this is a constant with symbolic value,
1645     and it has a term with an explicit integer value,
1646     link it up with related expressions.  */
1647  if (GET_CODE (x) == CONST)
1648    {
1649      rtx subexp = get_related_value (x);
1650      unsigned subhash;
1651      struct table_elt *subelt, *subelt_prev;
1652
1653      if (subexp != 0)
1654	{
1655	  /* Get the integer-free subexpression in the hash table.  */
1656	  subhash = SAFE_HASH (subexp, mode);
1657	  subelt = lookup (subexp, subhash, mode);
1658	  if (subelt == 0)
1659	    subelt = insert (subexp, NULL, subhash, mode);
1660	  /* Initialize SUBELT's circular chain if it has none.  */
1661	  if (subelt->related_value == 0)
1662	    subelt->related_value = subelt;
1663	  /* Find the element in the circular chain that precedes SUBELT.  */
1664	  subelt_prev = subelt;
1665	  while (subelt_prev->related_value != subelt)
1666	    subelt_prev = subelt_prev->related_value;
1667	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1668	     This way the element that follows SUBELT is the oldest one.  */
1669	  elt->related_value = subelt_prev->related_value;
1670	  subelt_prev->related_value = elt;
1671	}
1672    }
1673
1674  table_size++;
1675
1676  return elt;
1677}
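
/* The value-class threading done above is easier to see in isolation.
   The following is a minimal sketch, kept out of the build with #if 0;
   the type and function names are invented for illustration, and a
   plain int cost stands in for the cost/regcost pair that CHEAPER
   compares.  It shows insertion into a doubly-linked equivalence class
   so that the cheapest element is always the one that every member's
   first_same_value names.  */
#if 0
struct node
{
  int cost;
  struct node *next_same_value, *prev_same_value, *first_same_value;
};

/* Insert ELT into the nonempty class headed by HEAD, keeping ascending
   cost order, and return the resulting head.  */
static struct node *
class_insert_sorted (struct node *head, struct node *elt)
{
  struct node *p;

  if (elt->cost < head->cost)
    {
      /* ELT becomes the new head; repoint every first_same_value.  */
      elt->next_same_value = head;
      elt->prev_same_value = 0;
      head->prev_same_value = elt;
      for (p = elt; p; p = p->next_same_value)
	p->first_same_value = elt;
      return elt;
    }

  /* Find the last element cheaper than ELT and splice in after it.  */
  for (p = head;
       p->next_same_value && p->next_same_value->cost < elt->cost;
       p = p->next_same_value)
    ;
  elt->next_same_value = p->next_same_value;
  if (p->next_same_value)
    p->next_same_value->prev_same_value = elt;
  elt->prev_same_value = p;
  p->next_same_value = elt;
  elt->first_same_value = head;
  return head;
}
#endif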
1678
1679/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1680   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1681   the two classes equivalent.
1682
1683   CLASS1 will be the surviving class; CLASS2 should not be used after this
1684   call.
1685
1686   Any invalid entries in CLASS2 will not be copied.  */
1687
1688static void
1689merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1690{
1691  struct table_elt *elt, *next, *new;
1692
1693  /* Ensure we start with the head of the classes.  */
1694  class1 = class1->first_same_value;
1695  class2 = class2->first_same_value;
1696
1697  /* If they were already equal, forget it.  */
1698  if (class1 == class2)
1699    return;
1700
1701  for (elt = class2; elt; elt = next)
1702    {
1703      unsigned int hash;
1704      rtx exp = elt->exp;
1705      enum machine_mode mode = elt->mode;
1706
1707      next = elt->next_same_value;
1708
1709      /* Remove old entry, make a new one in CLASS1's class.
1710	 Don't do this for invalid entries as we cannot find their
1711	 hash code (it also isn't necessary).  */
1712      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1713	{
1714	  bool need_rehash = false;
1715
1716	  hash_arg_in_memory = 0;
1717	  hash = HASH (exp, mode);
1718
1719	  if (REG_P (exp))
1720	    {
1721	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1722	      delete_reg_equiv (REGNO (exp));
1723	    }
1724
1725	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1726	    remove_pseudo_from_table (exp, hash);
1727	  else
1728	    remove_from_table (elt, hash);
1729
1730	  if (insert_regs (exp, class1, 0) || need_rehash)
1731	    {
1732	      rehash_using_reg (exp);
1733	      hash = HASH (exp, mode);
1734	    }
1735	  new = insert (exp, class1, hash, mode);
1736	  new->in_memory = hash_arg_in_memory;
1737	}
1738    }
1739}
1740
1741/* Flush the entire hash table.  */
1742
1743static void
1744flush_hash_table (void)
1745{
1746  int i;
1747  struct table_elt *p;
1748
1749  for (i = 0; i < HASH_SIZE; i++)
1750    for (p = table[i]; p; p = table[i])
1751      {
1752	/* Note that invalidate can remove elements
1753	   after P in the current hash chain.  */
1754	if (REG_P (p->exp))
1755	  invalidate (p->exp, p->mode);
1756	else
1757	  remove_from_table (p, i);
1758      }
1759}
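
/* Note the deliberate shape of the inner loop above: it re-reads
   table[i] instead of caching p->next_same_hash, because invalidate
   can delete elements after P in the same chain.  A sketch of the
   pattern (names invented; kept out of the build with #if 0):  */
#if 0
struct link { struct link *next; };
static struct link *bucket;

/* May free P and any number of other nodes, but always unlinks P.  */
extern void delete_some (struct link *p);

static void
drain_bucket (void)
{
  struct link *p;

  /* Caching p->next here could leave a dangling pointer; re-reading
     the head is always safe because P itself is removed each time.  */
  while ((p = bucket) != 0)
    delete_some (p);
}
#endif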
1760
/* Function called for each rtx to check whether true dependence exists.  */
1762struct check_dependence_data
1763{
1764  enum machine_mode mode;
1765  rtx exp;
1766  rtx addr;
1767};
1768
1769static int
1770check_dependence (rtx *x, void *data)
1771{
1772  struct check_dependence_data *d = (struct check_dependence_data *) data;
1773  if (*x && MEM_P (*x))
1774    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
                                  cse_rtx_varies_p);
1776  else
1777    return 0;
1778}
1779
1780/* Remove from the hash table, or mark as invalid, all expressions whose
1781   values could be altered by storing in X.  X is a register, a subreg, or
1782   a memory reference with nonvarying address (because, when a memory
1783   reference with a varying address is stored in, all memory references are
1784   removed by invalidate_memory so specific invalidation is superfluous).
1785   FULL_MODE, if not VOIDmode, indicates that this much should be
1786   invalidated instead of just the amount indicated by the mode of X.  This
1787   is only used for bitfield stores into memory.
1788
1789   A nonvarying address may be just a register or just a symbol reference,
1790   or it may be either of those plus a numeric offset.  */
1791
1792static void
1793invalidate (rtx x, enum machine_mode full_mode)
1794{
1795  int i;
1796  struct table_elt *p;
1797  rtx addr;
1798
1799  switch (GET_CODE (x))
1800    {
1801    case REG:
1802      {
1803	/* If X is a register, dependencies on its contents are recorded
1804	   through the qty number mechanism.  Just change the qty number of
1805	   the register, mark it as invalid for expressions that refer to it,
1806	   and remove it itself.  */
1807	unsigned int regno = REGNO (x);
1808	unsigned int hash = HASH (x, GET_MODE (x));
1809
1810	/* Remove REGNO from any quantity list it might be on and indicate
1811	   that its value might have changed.  If it is a pseudo, remove its
1812	   entry from the hash table.
1813
1814	   For a hard register, we do the first two actions above for any
1815	   additional hard registers corresponding to X.  Then, if any of these
1816	   registers are in the table, we must remove any REG entries that
1817	   overlap these registers.  */
1818
1819	delete_reg_equiv (regno);
1820	REG_TICK (regno)++;
1821	SUBREG_TICKED (regno) = -1;
1822
1823	if (regno >= FIRST_PSEUDO_REGISTER)
1824	  remove_pseudo_from_table (x, hash);
1825	else
1826	  {
1827	    HOST_WIDE_INT in_table
1828	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1829	    unsigned int endregno
1830	      = regno + hard_regno_nregs[regno][GET_MODE (x)];
1831	    unsigned int tregno, tendregno, rn;
1832	    struct table_elt *p, *next;
1833
1834	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1835
1836	    for (rn = regno + 1; rn < endregno; rn++)
1837	      {
1838		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1839		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1840		delete_reg_equiv (rn);
1841		REG_TICK (rn)++;
1842		SUBREG_TICKED (rn) = -1;
1843	      }
1844
1845	    if (in_table)
1846	      for (hash = 0; hash < HASH_SIZE; hash++)
1847		for (p = table[hash]; p; p = next)
1848		  {
1849		    next = p->next_same_hash;
1850
1851		    if (!REG_P (p->exp)
1852			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1853		      continue;
1854
1855		    tregno = REGNO (p->exp);
1856		    tendregno
1857		      = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1858		    if (tendregno > regno && tregno < endregno)
1859		      remove_from_table (p, hash);
1860		  }
1861	  }
1862      }
1863      return;
1864
1865    case SUBREG:
1866      invalidate (SUBREG_REG (x), VOIDmode);
1867      return;
1868
1869    case PARALLEL:
1870      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1871	invalidate (XVECEXP (x, 0, i), VOIDmode);
1872      return;
1873
1874    case EXPR_LIST:
1875      /* This is part of a disjoint return value; extract the location in
1876	 question ignoring the offset.  */
1877      invalidate (XEXP (x, 0), VOIDmode);
1878      return;
1879
1880    case MEM:
1881      addr = canon_rtx (get_addr (XEXP (x, 0)));
1882      /* Calculate the canonical version of X here so that
1883	 true_dependence doesn't generate new RTL for X on each call.  */
1884      x = canon_rtx (x);
1885
1886      /* Remove all hash table elements that refer to overlapping pieces of
1887	 memory.  */
1888      if (full_mode == VOIDmode)
1889	full_mode = GET_MODE (x);
1890
1891      for (i = 0; i < HASH_SIZE; i++)
1892	{
1893	  struct table_elt *next;
1894
1895	  for (p = table[i]; p; p = next)
1896	    {
1897	      next = p->next_same_hash;
1898	      if (p->in_memory)
1899		{
1900		  struct check_dependence_data d;
1901
1902		  /* Just canonicalize the expression once;
1903		     otherwise each time we call invalidate
1904		     true_dependence will canonicalize the
1905		     expression again.  */
1906		  if (!p->canon_exp)
1907		    p->canon_exp = canon_rtx (p->exp);
1908		  d.exp = x;
1909		  d.addr = addr;
1910		  d.mode = full_mode;
1911		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1912		    remove_from_table (p, i);
1913		}
1914	    }
1915	}
1916      return;
1917
1918    default:
1919      gcc_unreachable ();
1920    }
1921}
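
/* The hard-register case above removes any REG entry whose register
   range overlaps the invalidated range [REGNO, ENDREGNO).  For two
   half-open ranges, the overlap test reduces to the two comparisons
   used at the remove_from_table call.  A sketch (helper name invented;
   kept out of the build with #if 0):  */
#if 0
/* Nonzero iff the half-open ranges [LO1, HI1) and [LO2, HI2) share
   at least one register number.  */
static int
ranges_overlap (unsigned int lo1, unsigned int hi1,
		unsigned int lo2, unsigned int hi2)
{
  return hi2 > lo1 && lo2 < hi1;
}

/* E.g. a two-word value in hard regs 4..5 overlaps an invalidation of
   regs 5..6: ranges_overlap (5, 7, 4, 6) is nonzero.  */
#endif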
1922
1923/* Remove all expressions that refer to register REGNO,
1924   since they are already invalid, and we are about to
1925   mark that register valid again and don't want the old
1926   expressions to reappear as valid.  */
1927
1928static void
1929remove_invalid_refs (unsigned int regno)
1930{
1931  unsigned int i;
1932  struct table_elt *p, *next;
1933
1934  for (i = 0; i < HASH_SIZE; i++)
1935    for (p = table[i]; p; p = next)
1936      {
1937	next = p->next_same_hash;
1938	if (!REG_P (p->exp)
1939	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1940	  remove_from_table (p, i);
1941      }
1942}
1943
1944/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1945   and mode MODE.  */
1946static void
1947remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1948			    enum machine_mode mode)
1949{
1950  unsigned int i;
1951  struct table_elt *p, *next;
1952  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1953
1954  for (i = 0; i < HASH_SIZE; i++)
1955    for (p = table[i]; p; p = next)
1956      {
1957	rtx exp = p->exp;
1958	next = p->next_same_hash;
1959
1960	if (!REG_P (exp)
1961	    && (GET_CODE (exp) != SUBREG
1962		|| !REG_P (SUBREG_REG (exp))
1963		|| REGNO (SUBREG_REG (exp)) != regno
1964		|| (((SUBREG_BYTE (exp)
1965		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1966		    && SUBREG_BYTE (exp) <= end))
1967	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1968	  remove_from_table (p, i);
1969      }
1970}
1971
1972/* Recompute the hash codes of any valid entries in the hash table that
1973   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1974
1975   This is called when we make a jump equivalence.  */
1976
1977static void
1978rehash_using_reg (rtx x)
1979{
1980  unsigned int i;
1981  struct table_elt *p, *next;
1982  unsigned hash;
1983
1984  if (GET_CODE (x) == SUBREG)
1985    x = SUBREG_REG (x);
1986
1987  /* If X is not a register or if the register is known not to be in any
1988     valid entries in the table, we have no work to do.  */
1989
1990  if (!REG_P (x)
1991      || REG_IN_TABLE (REGNO (x)) < 0
1992      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1993    return;
1994
1995  /* Scan all hash chains looking for valid entries that mention X.
1996     If we find one and it is in the wrong hash chain, move it.  */
1997
1998  for (i = 0; i < HASH_SIZE; i++)
1999    for (p = table[i]; p; p = next)
2000      {
2001	next = p->next_same_hash;
2002	if (reg_mentioned_p (x, p->exp)
2003	    && exp_equiv_p (p->exp, p->exp, 1, false)
2004	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2005	  {
2006	    if (p->next_same_hash)
2007	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2008
2009	    if (p->prev_same_hash)
2010	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2011	    else
2012	      table[i] = p->next_same_hash;
2013
2014	    p->next_same_hash = table[hash];
2015	    p->prev_same_hash = 0;
2016	    if (table[hash])
2017	      table[hash]->prev_same_hash = p;
2018	    table[hash] = p;
2019	  }
2020      }
2021}
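
/* The relinking above is the standard move of a doubly-linked node
   between chained-hash buckets.  In isolation (names invented and kept
   out of the build with #if 0; the prev/next fields mirror
   prev_same_hash/next_same_hash):  */
#if 0
struct elt { struct elt *next, *prev; };
static struct elt *buckets[64];

static void
move_to_bucket (struct elt *p, unsigned int from, unsigned int to)
{
  /* Unlink P from its old chain.  */
  if (p->next)
    p->next->prev = p->prev;
  if (p->prev)
    p->prev->next = p->next;
  else
    buckets[from] = p->next;

  /* Push P onto the front of the new chain.  */
  p->next = buckets[to];
  p->prev = 0;
  if (buckets[to])
    buckets[to]->prev = p;
  buckets[to] = p;
}
#endif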
2022
2023/* Remove from the hash table any expression that is a call-clobbered
2024   register.  Also update their TICK values.  */
2025
2026static void
2027invalidate_for_call (void)
2028{
2029  unsigned int regno, endregno;
2030  unsigned int i;
2031  unsigned hash;
2032  struct table_elt *p, *next;
2033  int in_table = 0;
2034
2035  /* Go through all the hard registers.  For each that is clobbered in
2036     a CALL_INSN, remove the register from quantity chains and update
2037     reg_tick if defined.  Also see if any of these registers is currently
2038     in the table.  */
2039
2040  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2041    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2042      {
2043	delete_reg_equiv (regno);
2044	if (REG_TICK (regno) >= 0)
2045	  {
2046	    REG_TICK (regno)++;
2047	    SUBREG_TICKED (regno) = -1;
2048	  }
2049
2050	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2051      }
2052
2053  /* In the case where we have no call-clobbered hard registers in the
2054     table, we are done.  Otherwise, scan the table and remove any
2055     entry that overlaps a call-clobbered register.  */
2056
2057  if (in_table)
2058    for (hash = 0; hash < HASH_SIZE; hash++)
2059      for (p = table[hash]; p; p = next)
2060	{
2061	  next = p->next_same_hash;
2062
2063	  if (!REG_P (p->exp)
2064	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2065	    continue;
2066
2067	  regno = REGNO (p->exp);
2068	  endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2069
2070	  for (i = regno; i < endregno; i++)
2071	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2072	      {
2073		remove_from_table (p, hash);
2074		break;
2075	      }
2076	}
2077}
2078
2079/* Given an expression X of type CONST,
2080   and ELT which is its table entry (or 0 if it
2081   is not in the hash table),
2082   return an alternate expression for X as a register plus integer.
2083   If none can be found, return 0.  */
2084
2085static rtx
2086use_related_value (rtx x, struct table_elt *elt)
2087{
2088  struct table_elt *relt = 0;
2089  struct table_elt *p, *q;
2090  HOST_WIDE_INT offset;
2091
2092  /* First, is there anything related known?
2093     If we have a table element, we can tell from that.
2094     Otherwise, must look it up.  */
2095
2096  if (elt != 0 && elt->related_value != 0)
2097    relt = elt;
2098  else if (elt == 0 && GET_CODE (x) == CONST)
2099    {
2100      rtx subexp = get_related_value (x);
2101      if (subexp != 0)
2102	relt = lookup (subexp,
2103		       SAFE_HASH (subexp, GET_MODE (subexp)),
2104		       GET_MODE (subexp));
2105    }
2106
2107  if (relt == 0)
2108    return 0;
2109
2110  /* Search all related table entries for one that has an
2111     equivalent register.  */
2112
2113  p = relt;
2114  while (1)
2115    {
2116      /* This loop is strange in that it is executed in two different cases.
2117	 The first is when X is already in the table.  Then it is searching
2118	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2119	 X is not in the table.  Then RELT points to a class for the related
2120	 value.
2121
         Ensure that, whatever case we are in, we ignore classes that have
2123	 the same value as X.  */
2124
2125      if (rtx_equal_p (x, p->exp))
2126	q = 0;
2127      else
2128	for (q = p->first_same_value; q; q = q->next_same_value)
2129	  if (REG_P (q->exp))
2130	    break;
2131
2132      if (q)
2133	break;
2134
2135      p = p->related_value;
2136
2137      /* We went all the way around, so there is nothing to be found.
2138	 Alternatively, perhaps RELT was in the table for some other reason
2139	 and it has no related values recorded.  */
2140      if (p == relt || p == 0)
2141	break;
2142    }
2143
2144  if (q == 0)
2145    return 0;
2146
2147  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2149  return plus_constant (q->exp, offset);
2150}
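
/* A worked example (the register and symbol are hypothetical): if X is
   (const (plus (symbol_ref "tbl") (const_int 16))) and the table already
   records (const (plus (symbol_ref "tbl") (const_int 4))) as equivalent
   to (reg 100), the related-value chain links the two CONSTs, and we
   return (plus (reg 100) (const_int 12)) -- the known register plus the
   difference of the two integer terms.  */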
2151
2152/* Hash a string.  Just add its bytes up.  */
2153static inline unsigned
2154hash_rtx_string (const char *ps)
2155{
2156  unsigned hash = 0;
2157  const unsigned char *p = (const unsigned char *) ps;
2158
2159  if (p)
2160    while (*p)
2161      hash += *p++;
2162
2163  return hash;
2164}
2165
2166/* Hash an rtx.  We are careful to make sure the value is never negative.
2167   Equivalent registers hash identically.
2168   MODE is used in hashing for CONST_INTs only;
2169   otherwise the mode of X is used.
2170
2171   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2172
   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which is not marked MEM_READONLY_P.
2175
2176   Note that cse_insn knows that the hash code of a MEM expression
2177   is just (int) MEM plus the hash code of the address.  */
2178
2179unsigned
2180hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2181	  int *hash_arg_in_memory_p, bool have_reg_qty)
2182{
2183  int i, j;
2184  unsigned hash = 0;
2185  enum rtx_code code;
2186  const char *fmt;
2187
2188  /* Used to turn recursion into iteration.  We can't rely on GCC's
2189     tail-recursion elimination since we need to keep accumulating values
2190     in HASH.  */
2191 repeat:
2192  if (x == 0)
2193    return hash;
2194
2195  code = GET_CODE (x);
2196  switch (code)
2197    {
2198    case REG:
2199      {
2200	unsigned int regno = REGNO (x);
2201
2202	if (!reload_completed)
2203	  {
2204	    /* On some machines, we can't record any non-fixed hard register,
2205	       because extending its life will cause reload problems.  We
2206	       consider ap, fp, sp, gp to be fixed for this purpose.
2207
               We also consider CCmode registers to be fixed for this
               purpose; failure to do so leads to failure to simplify
               conditionals of the `0 < 100' type.
2211
2212	       On all machines, we can't record any global registers.
2213	       Nor should we record any register that is in a small
2214	       class, as defined by CLASS_LIKELY_SPILLED_P.  */
2215	    bool record;
2216
2217	    if (regno >= FIRST_PSEUDO_REGISTER)
2218	      record = true;
2219	    else if (x == frame_pointer_rtx
2220		     || x == hard_frame_pointer_rtx
2221		     || x == arg_pointer_rtx
2222		     || x == stack_pointer_rtx
2223		     || x == pic_offset_table_rtx)
2224	      record = true;
2225	    else if (global_regs[regno])
2226	      record = false;
2227	    else if (fixed_regs[regno])
2228	      record = true;
2229	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2230	      record = true;
2231	    else if (SMALL_REGISTER_CLASSES)
2232	      record = false;
2233	    else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2234	      record = false;
2235	    else
2236	      record = true;
2237
2238	    if (!record)
2239	      {
2240		*do_not_record_p = 1;
2241		return 0;
2242	      }
2243	  }
2244
2245	hash += ((unsigned int) REG << 7);
2246        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2247	return hash;
2248      }
2249
2250    /* We handle SUBREG of a REG specially because the underlying
2251       reg changes its hash value with every value change; we don't
2252       want to have to forget unrelated subregs when one subreg changes.  */
2253    case SUBREG:
2254      {
2255	if (REG_P (SUBREG_REG (x)))
2256	  {
2257	    hash += (((unsigned int) SUBREG << 7)
2258		     + REGNO (SUBREG_REG (x))
2259		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2260	    return hash;
2261	  }
2262	break;
2263      }
2264
2265    case CONST_INT:
2266      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2267               + (unsigned int) INTVAL (x));
2268      return hash;
2269
2270    case CONST_DOUBLE:
2271      /* This is like the general case, except that it only counts
2272	 the integers representing the constant.  */
2273      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2274      if (GET_MODE (x) != VOIDmode)
2275	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2276      else
2277	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2278		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2279      return hash;
2280
2281    case CONST_VECTOR:
2282      {
2283	int units;
2284	rtx elt;
2285
2286	units = CONST_VECTOR_NUNITS (x);
2287
2288	for (i = 0; i < units; ++i)
2289	  {
2290	    elt = CONST_VECTOR_ELT (x, i);
2291	    hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2292			      hash_arg_in_memory_p, have_reg_qty);
2293	  }
2294
2295	return hash;
2296      }
2297
2298      /* Assume there is only one rtx object for any given label.  */
2299    case LABEL_REF:
2300      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2301	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
2304      return hash;
2305
2306    case SYMBOL_REF:
2307      {
2308	/* Don't hash on the symbol's address to avoid bootstrap differences.
2309	   Different hash values may cause expressions to be recorded in
2310	   different orders and thus different registers to be used in the
2311	   final assembler.  This also avoids differences in the dump files
2312	   between various stages.  */
2313	unsigned int h = 0;
2314	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2315
2316	while (*p)
2317	  h += (h << 7) + *p++; /* ??? revisit */
2318
2319	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2320	return hash;
2321      }
2322
2323    case MEM:
2324      /* We don't record if marked volatile or if BLKmode since we don't
2325	 know the size of the move.  */
2326      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2327	{
2328	  *do_not_record_p = 1;
2329	  return 0;
2330	}
2331      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2332	*hash_arg_in_memory_p = 1;
2333
2334      /* Now that we have already found this special case,
2335	 might as well speed it up as much as possible.  */
2336      hash += (unsigned) MEM;
2337      x = XEXP (x, 0);
2338      goto repeat;
2339
2340    case USE:
2341      /* A USE that mentions non-volatile memory needs special
2342	 handling since the MEM may be BLKmode which normally
2343	 prevents an entry from being made.  Pure calls are
2344	 marked by a USE which mentions BLKmode memory.
2345	 See calls.c:emit_call_1.  */
2346      if (MEM_P (XEXP (x, 0))
2347	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2348	{
2349	  hash += (unsigned) USE;
2350	  x = XEXP (x, 0);
2351
2352	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2353	    *hash_arg_in_memory_p = 1;
2354
2355	  /* Now that we have already found this special case,
2356	     might as well speed it up as much as possible.  */
2357	  hash += (unsigned) MEM;
2358	  x = XEXP (x, 0);
2359	  goto repeat;
2360	}
2361      break;
2362
2363    case PRE_DEC:
2364    case PRE_INC:
2365    case POST_DEC:
2366    case POST_INC:
2367    case PRE_MODIFY:
2368    case POST_MODIFY:
2369    case PC:
2370    case CC0:
2371    case CALL:
2372    case UNSPEC_VOLATILE:
2373      *do_not_record_p = 1;
2374      return 0;
2375
2376    case ASM_OPERANDS:
2377      if (MEM_VOLATILE_P (x))
2378	{
2379	  *do_not_record_p = 1;
2380	  return 0;
2381	}
2382      else
2383	{
2384	  /* We don't want to take the filename and line into account.  */
2385	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2386	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2387	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2388	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2389
2390	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2391	    {
2392	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2393		{
2394		  hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2395				     GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2396				     do_not_record_p, hash_arg_in_memory_p,
2397				     have_reg_qty)
2398			   + hash_rtx_string
2399				(ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2400		}
2401
2402	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2403	      x = ASM_OPERANDS_INPUT (x, 0);
2404	      mode = GET_MODE (x);
2405	      goto repeat;
2406	    }
2407
2408	  return hash;
2409	}
2410      break;
2411
2412    default:
2413      break;
2414    }
2415
2416  i = GET_RTX_LENGTH (code) - 1;
2417  hash += (unsigned) code + (unsigned) GET_MODE (x);
2418  fmt = GET_RTX_FORMAT (code);
2419  for (; i >= 0; i--)
2420    {
2421      switch (fmt[i])
2422	{
2423	case 'e':
2424	  /* If we are about to do the last recursive call
2425	     needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
2427	  if (i == 0)
2428	    {
2429	      x = XEXP (x, i);
2430	      goto repeat;
2431	    }
2432
2433	  hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2434			    hash_arg_in_memory_p, have_reg_qty);
2435	  break;
2436
2437	case 'E':
2438	  for (j = 0; j < XVECLEN (x, i); j++)
2439	    hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2440			      hash_arg_in_memory_p, have_reg_qty);
2441	  break;
2442
2443	case 's':
2444	  hash += hash_rtx_string (XSTR (x, i));
2445	  break;
2446
2447	case 'i':
2448	  hash += (unsigned int) XINT (x, i);
2449	  break;
2450
2451	case '0': case 't':
2452	  /* Unused.  */
2453	  break;
2454
2455	default:
2456	  gcc_unreachable ();
2457	}
2458    }
2459
2460  return hash;
2461}
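
/* The `goto repeat' device above is worth seeing on a toy type: the
   running HASH accumulator keeps the final call from being a true tail
   call, so the conversion to iteration must be done by hand.  A sketch
   (type and function names invented; kept out of the build with #if 0):  */
#if 0
struct tree { int value; struct tree *left, *right; };

static unsigned int
tree_hash (const struct tree *t)
{
  unsigned int hash = 0;

 repeat:
  if (t == 0)
    return hash;
  hash += (unsigned int) t->value;
  hash += tree_hash (t->left);	/* Real recursion for all but one child.  */
  t = t->right;			/* The last call becomes iteration.  */
  goto repeat;
}
#endif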
2462
2463/* Hash an rtx X for cse via hash_rtx.
2464   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a MEM rtx which
   is not marked MEM_READONLY_P.  */
2467
2468static inline unsigned
2469canon_hash (rtx x, enum machine_mode mode)
2470{
2471  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2472}
2473
2474/* Like canon_hash but with no side effects, i.e. do_not_record
2475   and hash_arg_in_memory are not changed.  */
2476
2477static inline unsigned
2478safe_hash (rtx x, enum machine_mode mode)
2479{
2480  int dummy_do_not_record;
2481  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2482}
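
/* For example, rehash_using_reg computes hashes through SAFE_HASH while
   walking the table, precisely because recomputing a hash code there
   must not disturb the do_not_record and hash_arg_in_memory flags that
   describe the insn currently being processed.  */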
2483
2484/* Return 1 iff X and Y would canonicalize into the same thing,
2485   without actually constructing the canonicalization of either one.
2486   If VALIDATE is nonzero,
2487   we assume X is an expression being processed from the rtl
2488   and Y was found in the hash table.  We check register refs
2489   in Y for being marked as valid.
2490
2491   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2492
2493int
2494exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2495{
2496  int i, j;
2497  enum rtx_code code;
2498  const char *fmt;
2499
2500  /* Note: it is incorrect to assume an expression is equivalent to itself
2501     if VALIDATE is nonzero.  */
2502  if (x == y && !validate)
2503    return 1;
2504
2505  if (x == 0 || y == 0)
2506    return x == y;
2507
2508  code = GET_CODE (x);
2509  if (code != GET_CODE (y))
2510    return 0;
2511
2512  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2513  if (GET_MODE (x) != GET_MODE (y))
2514    return 0;
2515
2516  switch (code)
2517    {
2518    case PC:
2519    case CC0:
2520    case CONST_INT:
2521    case CONST_DOUBLE:
2522      return x == y;
2523
2524    case LABEL_REF:
2525      return XEXP (x, 0) == XEXP (y, 0);
2526
2527    case SYMBOL_REF:
2528      return XSTR (x, 0) == XSTR (y, 0);
2529
2530    case REG:
2531      if (for_gcse)
2532	return REGNO (x) == REGNO (y);
2533      else
2534	{
2535	  unsigned int regno = REGNO (y);
2536	  unsigned int i;
2537	  unsigned int endregno
2538	    = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2539		       : hard_regno_nregs[regno][GET_MODE (y)]);
2540
	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If they are and we are not to validate, they
	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2544
2545	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2546	    return 0;
2547
2548	  if (! validate)
2549	    return 1;
2550
2551	  for (i = regno; i < endregno; i++)
2552	    if (REG_IN_TABLE (i) != REG_TICK (i))
2553	      return 0;
2554
2555	  return 1;
2556	}
2557
2558    case MEM:
2559      if (for_gcse)
2560	{
2561	  /* A volatile mem should not be considered equivalent to any
2562	     other.  */
2563	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2564	    return 0;
2565
2566	  /* Can't merge two expressions in different alias sets, since we
2567	     can decide that the expression is transparent in a block when
2568	     it isn't, due to it being set with the different alias set.
2569
2570	     Also, can't merge two expressions with different MEM_ATTRS.
2571	     They could e.g. be two different entities allocated into the
2572	     same space on the stack (see e.g. PR25130).  In that case, the
2573	     MEM addresses can be the same, even though the two MEMs are
2574	     absolutely not equivalent.
2575
2576	     But because really all MEM attributes should be the same for
2577	     equivalent MEMs, we just use the invariant that MEMs that have
2578	     the same attributes share the same mem_attrs data structure.  */
2579	  if (MEM_ATTRS (x) != MEM_ATTRS (y))
2580	    return 0;
2581	}
2582      break;
2583
2584    /*  For commutative operations, check both orders.  */
2585    case PLUS:
2586    case MULT:
2587    case AND:
2588    case IOR:
2589    case XOR:
2590    case NE:
2591    case EQ:
2592      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2593			     validate, for_gcse)
2594	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2595				validate, for_gcse))
2596	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2597				validate, for_gcse)
2598		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2599				   validate, for_gcse)));
2600
2601    case ASM_OPERANDS:
2602      /* We don't use the generic code below because we want to
2603	 disregard filename and line numbers.  */
2604
2605      /* A volatile asm isn't equivalent to any other.  */
2606      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2607	return 0;
2608
2609      if (GET_MODE (x) != GET_MODE (y)
2610	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2611	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2612		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2613	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2614	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2615	return 0;
2616
2617      if (ASM_OPERANDS_INPUT_LENGTH (x))
2618	{
2619	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2620	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2621			       ASM_OPERANDS_INPUT (y, i),
2622			       validate, for_gcse)
2623		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2624			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2625	      return 0;
2626	}
2627
2628      return 1;
2629
2630    default:
2631      break;
2632    }
2633
2634  /* Compare the elements.  If any pair of corresponding elements
2635     fail to match, return 0 for the whole thing.  */
2636
2637  fmt = GET_RTX_FORMAT (code);
2638  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2639    {
2640      switch (fmt[i])
2641	{
2642	case 'e':
2643	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2644			      validate, for_gcse))
2645	    return 0;
2646	  break;
2647
2648	case 'E':
2649	  if (XVECLEN (x, i) != XVECLEN (y, i))
2650	    return 0;
2651	  for (j = 0; j < XVECLEN (x, i); j++)
2652	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2653				validate, for_gcse))
2654	      return 0;
2655	  break;
2656
2657	case 's':
2658	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2659	    return 0;
2660	  break;
2661
2662	case 'i':
2663	  if (XINT (x, i) != XINT (y, i))
2664	    return 0;
2665	  break;
2666
2667	case 'w':
2668	  if (XWINT (x, i) != XWINT (y, i))
2669	    return 0;
2670	  break;
2671
2672	case '0':
2673	case 't':
2674	  break;
2675
2676	default:
2677	  gcc_unreachable ();
2678	}
2679    }
2680
2681  return 1;
2682}
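
/* The commutative case above accepts the operands of X and Y in either
   order.  Reduced to a toy expression type (names invented; kept out of
   the build with #if 0), the shape of the test is:  */
#if 0
struct expr { int op; struct expr *arg0, *arg1; };
extern int equiv_p (const struct expr *, const struct expr *);

static int
commutative_equiv_p (const struct expr *x, const struct expr *y)
{
  return ((equiv_p (x->arg0, y->arg0) && equiv_p (x->arg1, y->arg1))
	  || (equiv_p (x->arg0, y->arg1) && equiv_p (x->arg1, y->arg0)));
}
#endif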
2683
2684/* Return 1 if X has a value that can vary even between two
2685   executions of the program.  0 means X can be compared reliably
2686   against certain constants or near-constants.  */
2687
2688static int
2689cse_rtx_varies_p (rtx x, int from_alias)
2690{
2691  /* We need not check for X and the equivalence class being of the same
2692     mode because if X is equivalent to a constant in some mode, it
2693     doesn't vary in any mode.  */
2694
2695  if (REG_P (x)
2696      && REGNO_QTY_VALID_P (REGNO (x)))
2697    {
2698      int x_q = REG_QTY (REGNO (x));
2699      struct qty_table_elem *x_ent = &qty_table[x_q];
2700
2701      if (GET_MODE (x) == x_ent->mode
2702	  && x_ent->const_rtx != NULL_RTX)
2703	return 0;
2704    }
2705
2706  if (GET_CODE (x) == PLUS
2707      && GET_CODE (XEXP (x, 1)) == CONST_INT
2708      && REG_P (XEXP (x, 0))
2709      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2710    {
2711      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2712      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2713
2714      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2715	  && x0_ent->const_rtx != NULL_RTX)
2716	return 0;
2717    }
2718
2719  /* This can happen as the result of virtual register instantiation, if
2720     the initial constant is too large to be a valid address.  This gives
2721     us a three instruction sequence, load large offset into a register,
2722     load fp minus a constant into a register, then a MEM which is the
2723     sum of the two `constant' registers.  */
2724  if (GET_CODE (x) == PLUS
2725      && REG_P (XEXP (x, 0))
2726      && REG_P (XEXP (x, 1))
2727      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2728      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2729    {
2730      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2731      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2732      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2733      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2734
2735      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2736	  && x0_ent->const_rtx != NULL_RTX
2737	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2738	  && x1_ent->const_rtx != NULL_RTX)
2739	return 0;
2740    }
2741
2742  return rtx_varies_p (x, from_alias);
2743}
2744
2745/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2746   the result if necessary.  INSN is as for canon_reg.  */
2747
2748static void
2749validate_canon_reg (rtx *xloc, rtx insn)
2750{
2751  rtx new = canon_reg (*xloc, insn);
2752  int insn_code;
2753
2754  /* If replacing pseudo with hard reg or vice versa, ensure the
2755     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2756  if (insn != 0 && new != 0
2757      && REG_P (new) && REG_P (*xloc)
2758      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2759	   != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2760	  || GET_MODE (new) != GET_MODE (*xloc)
2761	  || (insn_code = recog_memoized (insn)) < 0
2762	  || insn_data[insn_code].n_dups > 0))
2763    validate_change (insn, xloc, new, 1);
2764  else
2765    *xloc = new;
2766}
2767
2768/* Canonicalize an expression:
2769   replace each register reference inside it
2770   with the "oldest" equivalent register.
2771
2772   If INSN is nonzero and we are replacing a pseudo with a hard register
2773   or vice versa, validate_change is used to ensure that INSN remains valid
2774   after we make our substitution.  The calls are made with IN_GROUP nonzero
2775   so apply_change_group must be called upon the outermost return from this
2776   function (unless INSN is zero).  The result of apply_change_group can
2777   generally be discarded since the changes we are making are optional.  */
2778
2779static rtx
2780canon_reg (rtx x, rtx insn)
2781{
2782  int i;
2783  enum rtx_code code;
2784  const char *fmt;
2785
2786  if (x == 0)
2787    return x;
2788
2789  code = GET_CODE (x);
2790  switch (code)
2791    {
2792    case PC:
2793    case CC0:
2794    case CONST:
2795    case CONST_INT:
2796    case CONST_DOUBLE:
2797    case CONST_VECTOR:
2798    case SYMBOL_REF:
2799    case LABEL_REF:
2800    case ADDR_VEC:
2801    case ADDR_DIFF_VEC:
2802      return x;
2803
2804    case REG:
2805      {
2806	int first;
2807	int q;
2808	struct qty_table_elem *ent;
2809
2810	/* Never replace a hard reg, because hard regs can appear
2811	   in more than one machine mode, and we must preserve the mode
2812	   of each occurrence.  Also, some hard regs appear in
2813	   MEMs that are shared and mustn't be altered.  Don't try to
2814	   replace any reg that maps to a reg of class NO_REGS.  */
2815	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2816	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2817	  return x;
2818
2819	q = REG_QTY (REGNO (x));
2820	ent = &qty_table[q];
2821	first = ent->first_reg;
2822	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2823		: REGNO_REG_CLASS (first) == NO_REGS ? x
2824		: gen_rtx_REG (ent->mode, first));
2825      }
2826
2827    default:
2828      break;
2829    }
2830
2831  fmt = GET_RTX_FORMAT (code);
2832  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2833    {
2834      int j;
2835
2836      if (fmt[i] == 'e')
2837	validate_canon_reg (&XEXP (x, i), insn);
2838      else if (fmt[i] == 'E')
2839	for (j = 0; j < XVECLEN (x, i); j++)
2840	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2841    }
2842
2843  return x;
2844}
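
/* A worked example (register numbers hypothetical): if reg 103 and
   reg 101 were both copied from reg 100 and still hold its value, the
   quantity table records reg 100 as first_reg for that quantity, so
   canon_reg rewrites (plus (reg 103) (reg 101)) as
   (plus (reg 100) (reg 100)) -- exposing, for instance, that the sum
   is a doubling.  */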
2845
2846/* LOC is a location within INSN that is an operand address (the contents of
2847   a MEM).  Find the best equivalent address to use that is valid for this
2848   insn.
2849
2850   On most CISC machines, complicated address modes are costly, and rtx_cost
2851   is a good approximation for that cost.  However, most RISC machines have
2852   only a few (usually only one) memory reference formats.  If an address is
2853   valid at all, it is often just as cheap as any other address.  Hence, for
2854   RISC machines, we use `address_cost' to compare the costs of various
2855   addresses.  For two addresses of equal cost, choose the one with the
2856   highest `rtx_cost' value as that has the potential of eliminating the
2857   most insns.  For equal costs, we choose the first in the equivalence
2858   class.  Note that we ignore the fact that pseudo registers are cheaper than
2859   hard registers here because we would also prefer the pseudo registers.  */
2860
2861static void
2862find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2863{
2864  struct table_elt *elt;
2865  rtx addr = *loc;
2866  struct table_elt *p;
2867  int found_better = 1;
2868  int save_do_not_record = do_not_record;
2869  int save_hash_arg_in_memory = hash_arg_in_memory;
2870  int addr_volatile;
2871  int regno;
2872  unsigned hash;
2873
2874  /* Do not try to replace constant addresses or addresses of local and
2875     argument slots.  These MEM expressions are made only once and inserted
2876     in many instructions, as well as being used to control symbol table
2877     output.  It is not safe to clobber them.
2878
2879     There are some uncommon cases where the address is already in a register
2880     for some reason, but we cannot take advantage of that because we have
2881     no easy way to unshare the MEM.  In addition, looking up all stack
2882     addresses is costly.  */
2883  if ((GET_CODE (addr) == PLUS
2884       && REG_P (XEXP (addr, 0))
2885       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2886       && (regno = REGNO (XEXP (addr, 0)),
2887	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2888	   || regno == ARG_POINTER_REGNUM))
2889      || (REG_P (addr)
2890	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2891	      || regno == HARD_FRAME_POINTER_REGNUM
2892	      || regno == ARG_POINTER_REGNUM))
2893      || CONSTANT_ADDRESS_P (addr))
2894    return;
2895
2896  /* If this address is not simply a register, try to fold it.  This will
2897     sometimes simplify the expression.  Many simplifications
2898     will not be valid, but some, usually applying the associative rule, will
2899     be valid and produce better code.  */
2900  if (!REG_P (addr))
2901    {
2902      rtx folded = canon_for_address (fold_rtx (addr, NULL_RTX));
2903
2904      if (folded != addr)
2905	{
2906	  int addr_folded_cost = address_cost (folded, mode);
2907	  int addr_cost = address_cost (addr, mode);
2908
2909	  if ((addr_folded_cost < addr_cost
2910	       || (addr_folded_cost == addr_cost
2911		   /* ??? The rtx_cost comparison is left over from an older
                      version of this code.  It is probably no longer helpful.  */
2913		   && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2914		       || approx_reg_cost (folded) < approx_reg_cost (addr))))
2915	      && validate_change (insn, loc, folded, 0))
2916	    addr = folded;
2917	}
2918    }
2919
2920  /* If this address is not in the hash table, we can't look for equivalences
2921     of the whole address.  Also, ignore if volatile.  */
2922
2923  do_not_record = 0;
2924  hash = HASH (addr, Pmode);
2925  addr_volatile = do_not_record;
2926  do_not_record = save_do_not_record;
2927  hash_arg_in_memory = save_hash_arg_in_memory;
2928
2929  if (addr_volatile)
2930    return;
2931
2932  elt = lookup (addr, hash, Pmode);
2933
2934  if (elt)
2935    {
2936      /* We need to find the best (under the criteria documented above) entry
2937	 in the class that is valid.  We use the `flag' field to indicate
2938	 choices that were invalid and iterate until we can't find a better
2939	 one that hasn't already been tried.  */
2940
2941      for (p = elt->first_same_value; p; p = p->next_same_value)
2942	p->flag = 0;
2943
2944      while (found_better)
2945	{
2946	  int best_addr_cost = address_cost (*loc, mode);
2947	  int best_rtx_cost = (elt->cost + 1) >> 1;
2948	  int exp_cost;
2949	  struct table_elt *best_elt = elt;
2950
2951	  found_better = 0;
2952	  for (p = elt->first_same_value; p; p = p->next_same_value)
2953	    if (! p->flag)
2954	      {
2955		if ((REG_P (p->exp)
2956		     || exp_equiv_p (p->exp, p->exp, 1, false))
2957		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2958			|| (exp_cost == best_addr_cost
2959			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
2960		  {
2961		    found_better = 1;
2962		    best_addr_cost = exp_cost;
2963		    best_rtx_cost = (p->cost + 1) >> 1;
2964		    best_elt = p;
2965		  }
2966	      }
2967
2968	  if (found_better)
2969	    {
2970	      if (validate_change (insn, loc,
2971				   canon_reg (copy_rtx (best_elt->exp),
2972					      NULL_RTX), 0))
2973		return;
2974	      else
2975		best_elt->flag = 1;
2976	    }
2977	}
2978    }
2979
2980  /* If the address is a binary operation with the first operand a register
2981     and the second a constant, do the same as above, but looking for
2982     equivalences of the register.  Then try to simplify before checking for
2983     the best address to use.  This catches a few cases:  First is when we
2984     have REG+const and the register is another REG+const.  We can often merge
2985     the constants and eliminate one insn and one register.  It may also be
2986     that a machine has a cheap REG+REG+const.  Finally, this improves the
2987     code on the Alpha for unaligned byte stores.  */
2988
2989  if (flag_expensive_optimizations
2990      && ARITHMETIC_P (*loc)
2991      && REG_P (XEXP (*loc, 0)))
2992    {
2993      rtx op1 = XEXP (*loc, 1);
2994
2995      do_not_record = 0;
2996      hash = HASH (XEXP (*loc, 0), Pmode);
2997      do_not_record = save_do_not_record;
2998      hash_arg_in_memory = save_hash_arg_in_memory;
2999
3000      elt = lookup (XEXP (*loc, 0), hash, Pmode);
3001      if (elt == 0)
3002	return;
3003
3004      /* We need to find the best (under the criteria documented above) entry
3005	 in the class that is valid.  We use the `flag' field to indicate
3006	 choices that were invalid and iterate until we can't find a better
3007	 one that hasn't already been tried.  */
3008
3009      for (p = elt->first_same_value; p; p = p->next_same_value)
3010	p->flag = 0;
3011
3012      while (found_better)
3013	{
3014	  int best_addr_cost = address_cost (*loc, mode);
3015	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
3016	  struct table_elt *best_elt = elt;
3017	  rtx best_rtx = *loc;
3018	  int count;
3019
	  /* In the worst case this is an O(n^2) algorithm, so limit our search
3021	     to the first 32 elements on the list.  This avoids trouble
3022	     compiling code with very long basic blocks that can easily
3023	     call simplify_gen_binary so many times that we run out of
3024	     memory.  */
3025
3026	  found_better = 0;
3027	  for (p = elt->first_same_value, count = 0;
3028	       p && count < 32;
3029	       p = p->next_same_value, count++)
3030	    if (! p->flag
3031		&& (REG_P (p->exp)
3032		    || (GET_CODE (p->exp) != EXPR_LIST
3033			&& exp_equiv_p (p->exp, p->exp, 1, false))))
3034
3035	      {
3036		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3037					       p->exp, op1);
3038		int new_cost;
3039
3040		/* Get the canonical version of the address so we can accept
3041		   more.  */
3042		new = canon_for_address (new);
3043
3044		new_cost = address_cost (new, mode);
3045
3046		if (new_cost < best_addr_cost
3047		    || (new_cost == best_addr_cost
3048			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3049		  {
3050		    found_better = 1;
3051		    best_addr_cost = new_cost;
3052		    best_rtx_cost = (COST (new) + 1) >> 1;
3053		    best_elt = p;
3054		    best_rtx = new;
3055		  }
3056	      }
3057
3058	  if (found_better)
3059	    {
3060	      if (validate_change (insn, loc,
3061				   canon_reg (copy_rtx (best_rtx),
3062					      NULL_RTX), 0))
3063		return;
3064	      else
3065		best_elt->flag = 1;
3066	    }
3067	}
3068    }
3069}
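
/* A worked example of the second strategy above (register numbers are
   hypothetical): with *LOC == (plus (reg 101) (const_int 4)), if the
   table shows (reg 101) equivalent to (plus (reg 100) (const_int 8)),
   simplify_gen_binary folds the candidate address to
   (plus (reg 100) (const_int 12)), which may be cheaper and may also
   let the insn that computed reg 101 be deleted.  */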
3070
/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   find what values are actually being compared.
3074
3075   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3076   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3077   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3078   compared to produce cc0.
3079
   The return value is the comparison operator: either CODE itself, the
   code of the equivalent comparison found in the table, or the inverse
   of that code if the sense of the comparison must be reversed.  */
3082
3083static enum rtx_code
3084find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3085		      enum machine_mode *pmode1, enum machine_mode *pmode2)
3086{
3087  rtx arg1, arg2;
3088
3089  arg1 = *parg1, arg2 = *parg2;
3090
3091  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3092
3093  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3094    {
3095      /* Set nonzero when we find something of interest.  */
3096      rtx x = 0;
3097      int reverse_code = 0;
3098      struct table_elt *p = 0;
3099
3100      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3101	 On machines with CC0, this is the only case that can occur, since
3102	 fold_rtx will return the COMPARE or item being compared with zero
3103	 when given CC0.  */
3104
3105      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3106	x = arg1;
3107
3108      /* If ARG1 is a comparison operator and CODE is testing for
3109	 STORE_FLAG_VALUE, get the inner arguments.  */
3110
3111      else if (COMPARISON_P (arg1))
3112	{
3113#ifdef FLOAT_STORE_FLAG_VALUE
3114	  REAL_VALUE_TYPE fsfv;
3115#endif
3116
3117	  if (code == NE
3118	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3119		  && code == LT && STORE_FLAG_VALUE == -1)
3120#ifdef FLOAT_STORE_FLAG_VALUE
3121	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3122		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3123		      REAL_VALUE_NEGATIVE (fsfv)))
3124#endif
3125	      )
3126	    x = arg1;
3127	  else if (code == EQ
3128		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3129		       && code == GE && STORE_FLAG_VALUE == -1)
3130#ifdef FLOAT_STORE_FLAG_VALUE
3131		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3132		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3133			   REAL_VALUE_NEGATIVE (fsfv)))
3134#endif
3135		   )
3136	    x = arg1, reverse_code = 1;
3137	}
3138
3139      /* ??? We could also check for
3140
3141	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3142
3143	 and related forms, but let's wait until we see them occurring.  */
3144
3145      if (x == 0)
3146	/* Look up ARG1 in the hash table and see if it has an equivalence
3147	   that lets us see what is being compared.  */
3148	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3149      if (p)
3150	{
3151	  p = p->first_same_value;
3152
3153	  /* If what we compare is already known to be constant, that is as
3154	     good as it gets.
3155	     We need to break the loop in this case, because otherwise we
3156	     can have an infinite loop when looking at a reg that is known
3157	     to be a constant which is the same as a comparison of a reg
3158	     against zero which appears later in the insn stream, which in
3159	     turn is constant and the same as the comparison of the first reg
3160	     against zero...  */
3161	  if (p->is_const)
3162	    break;
3163	}
3164
3165      for (; p; p = p->next_same_value)
3166	{
3167	  enum machine_mode inner_mode = GET_MODE (p->exp);
3168#ifdef FLOAT_STORE_FLAG_VALUE
3169	  REAL_VALUE_TYPE fsfv;
3170#endif
3171
3172	  /* If the entry isn't valid, skip it.  */
3173	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3174	    continue;
3175
3176	  if (GET_CODE (p->exp) == COMPARE
3177	      /* Another possibility is that this machine has a compare insn
3178		 that includes the comparison code.  In that case, ARG1 would
3179		 be equivalent to a comparison operation that would set ARG1 to
3180		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3181		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machines with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3184	      || ((code == NE
3185		   || (code == LT
3186		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3187		       && (GET_MODE_BITSIZE (inner_mode)
3188			   <= HOST_BITS_PER_WIDE_INT)
3189		       && (STORE_FLAG_VALUE
3190			   & ((HOST_WIDE_INT) 1
3191			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3192#ifdef FLOAT_STORE_FLAG_VALUE
3193		   || (code == LT
3194		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3195		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3196			   REAL_VALUE_NEGATIVE (fsfv)))
3197#endif
3198		   )
3199		  && COMPARISON_P (p->exp)))
3200	    {
3201	      x = p->exp;
3202	      break;
3203	    }
3204	  else if ((code == EQ
3205		    || (code == GE
3206			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3207			&& (GET_MODE_BITSIZE (inner_mode)
3208			    <= HOST_BITS_PER_WIDE_INT)
3209			&& (STORE_FLAG_VALUE
3210			    & ((HOST_WIDE_INT) 1
3211			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3212#ifdef FLOAT_STORE_FLAG_VALUE
3213		    || (code == GE
3214			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3215			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3216			    REAL_VALUE_NEGATIVE (fsfv)))
3217#endif
3218		    )
3219		   && COMPARISON_P (p->exp))
3220	    {
3221	      reverse_code = 1;
3222	      x = p->exp;
3223	      break;
3224	    }
3225
	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
3229	  else if (!rtx_addr_can_trap_p (p->exp))
3230	    {
3231	      arg1 = p->exp;
3232	      continue;
3233	    }
3234	}
3235
3236      /* If we didn't find a useful equivalence for ARG1, we are done.
3237	 Otherwise, set up for the next iteration.  */
3238      if (x == 0)
3239	break;
3240
3241      /* If we need to reverse the comparison, make sure that that is
3242	 possible -- we can't necessarily infer the value of GE from LT
3243	 with floating-point operands.  */
3244      if (reverse_code)
3245	{
3246	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3247	  if (reversed == UNKNOWN)
3248	    break;
3249	  else
3250	    code = reversed;
3251	}
3252      else if (COMPARISON_P (x))
3253	code = GET_CODE (x);
3254      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3255    }
3256
3257  /* Return our results.  Return the modes from before fold_rtx
3258     because fold_rtx might produce const_int, and then it's too late.  */
3259  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3260  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3261
3262  return code;
3263}
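
/* A worked example (register numbers hypothetical, STORE_FLAG_VALUE == 1,
   integer modes): given CODE == EQ, *PARG1 == (reg 105) and
   *PARG2 == (const_int 0), if the table records (reg 105) as equivalent
   to (gt (reg 100) (reg 101)), then the EQ test asks whether that flag
   is zero, i.e. whether the GT was false; so we return LE with
   *PARG1 == (reg 100) and *PARG2 == (reg 101).  */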
3264
3265/* Fold SUBREG.  */
3266
3267static rtx
3268fold_rtx_subreg (rtx x, rtx insn)
3269{
3270  enum machine_mode mode = GET_MODE (x);
3271  rtx folded_arg0;
3272  rtx const_arg0;
3273  rtx new;
3274
3275  /* See if we previously assigned a constant value to this SUBREG.  */
3276  if ((new = lookup_as_function (x, CONST_INT)) != 0
3277      || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3278    return new;
3279
3280  /* If this is a paradoxical SUBREG, we have no idea what value the
     extra bits would have.  However, if the operand is equivalent to
     a SUBREG whose operand has the same mode as ours, and all the
     modes are within a word, we can just use the inner operand
     because these SUBREGs just say how to treat the register.
3285
3286     Similarly if we find an integer constant.  */
3287
3288  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3289    {
3290      enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3291      struct table_elt *elt;
3292
3293      if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3294	  && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3295	  && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3296			    imode)) != 0)
3297	for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3298	  {
3299	    if (CONSTANT_P (elt->exp)
3300		&& GET_MODE (elt->exp) == VOIDmode)
3301	      return elt->exp;
3302
3303	    if (GET_CODE (elt->exp) == SUBREG
3304		&& GET_MODE (SUBREG_REG (elt->exp)) == mode
3305		&& exp_equiv_p (elt->exp, elt->exp, 1, false))
3306	      return copy_rtx (SUBREG_REG (elt->exp));
3307	  }
3308
3309      return x;
3310    }
3311
3312  /* Fold SUBREG_REG.  If it changed, see if we can simplify the
3313     SUBREG.  We might be able to if the SUBREG is extracting a single
3314     word in an integral mode or extracting the low part.  */
3315
3316  folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3317  const_arg0 = equiv_constant (folded_arg0);
3318  if (const_arg0)
3319    folded_arg0 = const_arg0;
3320
3321  if (folded_arg0 != SUBREG_REG (x))
3322    {
3323      new = simplify_subreg (mode, folded_arg0,
3324			     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3325      if (new)
3326	return new;
3327    }
3328
3329  if (REG_P (folded_arg0)
3330      && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3331    {
3332      struct table_elt *elt;
3333
3334      elt = lookup (folded_arg0,
3335		    HASH (folded_arg0, GET_MODE (folded_arg0)),
3336		    GET_MODE (folded_arg0));
3337
3338      if (elt)
3339	elt = elt->first_same_value;
3340
3341      if (subreg_lowpart_p (x))
3342	/* If this is a narrowing SUBREG and our operand is a REG, see
3343	   if we can find an equivalence for REG that is an arithmetic
3344	   operation in a wider mode where both operands are
3345	   paradoxical SUBREGs from objects of our result mode.  In
3346	   that case, we couldn't report an equivalent value for that
3347	   operation, since we don't know what the extra bits will be.
3348	   But we can find an equivalence for this SUBREG by folding
3349	   that operation in the narrow mode.  This allows us to fold
3350	   arithmetic in narrow modes when the machine only supports
3351	   word-sized arithmetic.
3352
3353	   Also look for a case where we have a SUBREG whose operand
3354	   is the same as our result.  If both modes are smaller than
3355	   a word, we are simply interpreting a register in different
3356	   modes and we can use the inner value.  */
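	/* E.g. if our SUBREG_REG is known to hold
	   (plus:SI (subreg:SI (reg:QI 1) 0) (subreg:SI (reg:QI 2) 0)),
	   the QImode lowpart can be folded as a QImode PLUS of whatever
	   constants (reg:QI 1) and (reg:QI 2) are known to hold: the
	   low byte of the wide sum depends only on the low bytes of the
	   operands.  (Illustrative RTL; register numbers are invented.)  */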
3357
3358	for (; elt; elt = elt->next_same_value)
3359	  {
3360	    enum rtx_code eltcode = GET_CODE (elt->exp);
3361
3362	    /* Just check for unary and binary operations.  */
3363	    if (UNARY_P (elt->exp)
3364		&& eltcode != SIGN_EXTEND
3365		&& eltcode != ZERO_EXTEND
3366		&& GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3367		&& GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3368		&& (GET_MODE_CLASS (mode)
3369		    == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3370	      {
3371		rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3372
3373		if (!REG_P (op0) && ! CONSTANT_P (op0))
3374		  op0 = fold_rtx (op0, NULL_RTX);
3375
3376		op0 = equiv_constant (op0);
3377		if (op0)
3378		  new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3379						  op0, mode);
3380	      }
3381	    else if (ARITHMETIC_P (elt->exp)
3382		     && eltcode != DIV && eltcode != MOD
3383		     && eltcode != UDIV && eltcode != UMOD
3384		     && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3385		     && eltcode != ROTATE && eltcode != ROTATERT
3386		     && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3387			  && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3388			      == mode))
3389			 || CONSTANT_P (XEXP (elt->exp, 0)))
3390		     && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3391			  && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3392			      == mode))
3393			 || CONSTANT_P (XEXP (elt->exp, 1))))
3394	      {
3395		rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3396		rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3397
3398		if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3399		  op0 = fold_rtx (op0, NULL_RTX);
3400
3401		if (op0)
3402		  op0 = equiv_constant (op0);
3403
3404		if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3405		  op1 = fold_rtx (op1, NULL_RTX);
3406
3407		if (op1)
3408		  op1 = equiv_constant (op1);
3409
3410		/* If we are looking for the low SImode part of
3411		   (ashift:DI c (const_int 32)), it doesn't work to
3412		   compute that in SImode, because a shift by 32 bits
3413		   in SImode is unpredictable.  We know the value is
3414		   0.  */
3415		if (op0 && op1
3416		    && GET_CODE (elt->exp) == ASHIFT
3417		    && GET_CODE (op1) == CONST_INT
3418		    && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3419		  {
3420		    if (INTVAL (op1)
3421			< GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3422		      /* If the count fits in the inner mode's width,
3423			 but exceeds the outer mode's width, the value
3424			 will get truncated to 0 by the subreg.  */
3425		      new = CONST0_RTX (mode);
3426		    else
3427		      /* If the count exceeds even the inner mode's width,
3428			 don't fold this expression.  */
3429		      new = 0;
3430		  }
3431		else if (op0 && op1)
3432		  new = simplify_binary_operation (GET_CODE (elt->exp),
3433						   mode, op0, op1);
3434	      }
3435
3436	    else if (GET_CODE (elt->exp) == SUBREG
3437		     && GET_MODE (SUBREG_REG (elt->exp)) == mode
3438		     && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3439			 <= UNITS_PER_WORD)
3440		     && exp_equiv_p (elt->exp, elt->exp, 1, false))
3441	      new = copy_rtx (SUBREG_REG (elt->exp));
3442
3443	    if (new)
3444	      return new;
3445	  }
3446      else
3447	/* A SUBREG resulting from a zero extension may fold to zero
3448	   if it extracts higher bits than the ZERO_EXTEND's source
3449	   bits.  FIXME: if combine were taught to combine these
3450	   instructions, this transformation could be moved to
3451	   simplify_subreg.  */
3452	for (; elt; elt = elt->next_same_value)
3453	  {
3454	    if (GET_CODE (elt->exp) == ZERO_EXTEND
3455		&& subreg_lsb (x)
3456		>= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3457	      return CONST0_RTX (mode);
3458	  }
3459    }
3460
3461  return x;
3462}
3463
3464/* Fold MEM.  Not to be called directly, see fold_rtx_mem instead.  */
3465
3466static rtx
3467fold_rtx_mem_1 (rtx x, rtx insn)
3468{
3469  enum machine_mode mode = GET_MODE (x);
3470  rtx new;
3471
3472  /* If we are not actually processing an insn, don't try to find the
3473     best address.  Not only don't we care, but we could modify the
3474     MEM in an invalid way since we have no insn to validate
3475     against.  */
3476  if (insn != 0)
3477    find_best_addr (insn, &XEXP (x, 0), mode);
3478
3479  {
3480    /* Even if we don't fold in the insn itself, we can safely do so
3481       here, in hopes of getting a constant.  */
3482    rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3483    rtx base = 0;
3484    HOST_WIDE_INT offset = 0;
3485
3486    if (REG_P (addr)
3487	&& REGNO_QTY_VALID_P (REGNO (addr)))
3488      {
3489	int addr_q = REG_QTY (REGNO (addr));
3490	struct qty_table_elem *addr_ent = &qty_table[addr_q];
3491
3492	if (GET_MODE (addr) == addr_ent->mode
3493	    && addr_ent->const_rtx != NULL_RTX)
3494	  addr = addr_ent->const_rtx;
3495      }
3496
3497    /* Call target hook to avoid the effects of -fpic etc....  */
3498    addr = targetm.delegitimize_address (addr);
3499
3500    /* If address is constant, split it into a base and integer
3501       offset.  */
3502    if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3503      base = addr;
3504    else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3505	     && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3506      {
3507	base = XEXP (XEXP (addr, 0), 0);
3508	offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3509      }
3510    else if (GET_CODE (addr) == LO_SUM
3511	     && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3512      base = XEXP (addr, 1);
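    /* E.g. (const:SI (plus:SI (symbol_ref:SI ("table")) (const_int 8)))
       splits into base (symbol_ref "table") and offset 8.  (Illustrative;
       the exact form of the address depends on the target.)  */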
3513
3514    /* If this is a constant pool reference, we can fold it into its
3515       constant to allow better value tracking.  */
3516    if (base && GET_CODE (base) == SYMBOL_REF
3517	&& CONSTANT_POOL_ADDRESS_P (base))
3518      {
3519	rtx constant = get_pool_constant (base);
3520	enum machine_mode const_mode = get_pool_mode (base);
3521	rtx new;
3522
3523	if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3524	  {
3525	    constant_pool_entries_cost = COST (constant);
3526	    constant_pool_entries_regcost = approx_reg_cost (constant);
3527	  }
3528
3529	/* If we are loading the full constant, we have an
3530	   equivalence.  */
3531	if (offset == 0 && mode == const_mode)
3532	  return constant;
3533
3534	/* If this actually isn't a constant (weird!), we can't do
3535	   anything.  Otherwise, handle the two most common cases:
3536	   extracting a word from a multi-word constant, and
3537	   extracting the low-order bits.  Other cases don't seem
3538	   common enough to worry about.  */
3539	if (! CONSTANT_P (constant))
3540	  return x;
3541
3542	if (GET_MODE_CLASS (mode) == MODE_INT
3543	    && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3544	    && offset % UNITS_PER_WORD == 0
3545	    && (new = operand_subword (constant,
3546				       offset / UNITS_PER_WORD,
3547				       0, const_mode)) != 0)
3548	  return new;
3549
3550	if (((BYTES_BIG_ENDIAN
3551	      && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3552	     || (! BYTES_BIG_ENDIAN && offset == 0))
3553	    && (new = gen_lowpart (mode, constant)) != 0)
3554	  return new;
3555      }
3556
3557    /* If this is a reference to a label at a known position in a jump
3558       table, we also know its value.  */
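    /* E.g. a load from (plus (label_ref L) (const_int 8)), where L heads
       an ADDR_VEC of SImode entries, reads the third entry of the table.
       (Illustrative; the entry size is that of the table's mode.)  */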
3559    if (base && GET_CODE (base) == LABEL_REF)
3560      {
3561	rtx label = XEXP (base, 0);
3562	rtx table_insn = NEXT_INSN (label);
3563
3564	if (table_insn && JUMP_P (table_insn)
3565	    && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3566	  {
3567	    rtx table = PATTERN (table_insn);
3568
3569	    if (offset >= 0
3570		&& (offset / GET_MODE_SIZE (GET_MODE (table))
3571		    < XVECLEN (table, 0)))
3572	      {
3573		rtx label = XVECEXP
3574		  (table, 0, offset / GET_MODE_SIZE (GET_MODE (table)));
3575		rtx set;
3576
3577		/* If we have an insn that loads the label from the
3578		   jumptable into a reg, we don't want to set the reg
3579		   to the label, because this may cause a reference to
3580		   the label to remain after the label is removed in
3581		   some very obscure cases (PR middle-end/18628).  */
3582		if (!insn)
3583		  return label;
3584
3585		set = single_set (insn);
3586
3587		if (! set || SET_SRC (set) != x)
3588		  return x;
3589
3590		/* If it's a jump, it's safe to reference the label.  */
3591		if (SET_DEST (set) == pc_rtx)
3592		  return label;
3593
3594		return x;
3595	      }
3596	  }
3597	if (table_insn && JUMP_P (table_insn)
3598	    && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3599	  {
3600	    rtx table = PATTERN (table_insn);
3601
3602	    if (offset >= 0
3603		&& (offset / GET_MODE_SIZE (GET_MODE (table))
3604		    < XVECLEN (table, 1)))
3605	      {
3606		offset /= GET_MODE_SIZE (GET_MODE (table));
3607		new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3608				     XEXP (table, 0));
3609
3610		if (GET_MODE (table) != Pmode)
3611		  new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3612
3613		/* Indicate this is a constant.  This isn't a valid
3614		   form of CONST, but it will only be used to fold the
3615		   next insns and then discarded, so it should be
3616		   safe.
3617
3618		   Note this expression must be explicitly discarded,
3619		   by cse_insn, else it may end up in a REG_EQUAL note
3620		   and "escape" to cause problems elsewhere.  */
3621		return gen_rtx_CONST (GET_MODE (new), new);
3622	      }
3623	  }
3624      }
3625
3626    return x;
3627  }
3628}
3629
3630/* Fold MEM.  */
3631
3632static rtx
3633fold_rtx_mem (rtx x, rtx insn)
3634{
3635  /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
3636     refuse to allow recursion of the latter past n levels.  This can
3637     happen because fold_rtx_mem will try to fold the address of the
3638     memory reference it is passed, i.e. conceptually throwing away
3639     the MEM and reinjecting the bare address into fold_rtx.  As a
3640     result, patterns like
3641
3642       set (reg1)
3643	   (plus (reg)
3644		 (mem (plus (reg2) (const_int))))
3645
3646       set (reg2)
3647	   (plus (reg)
3648		 (mem (plus (reg1) (const_int))))
3649
3650     will defeat any "first-order" short-circuit put in either
3651     function to prevent these infinite oscillations.
3652
3653     The heuristic for determining n is as follows: since each time
3654     it is invoked fold_rtx_mem throws away a MEM, and since MEMs
3655     are generically not nested, we assume that each invocation of
3656     fold_rtx_mem corresponds to a new "top-level" operand, i.e.
3657     the source or the destination of a SET.  So fold_rtx_mem is
3658     bound to stop or cycle before n recursions, n being the number
3659     of expressions recorded in the hash table.  We also leave some
3660     play to account for the initial steps.  */
3661
3662  static unsigned int depth;
3663  rtx ret;
3664
3665  if (depth > 3 + table_size)
3666    return x;
3667
3668  depth++;
3669  ret = fold_rtx_mem_1 (x, insn);
3670  depth--;
3671
3672  return ret;
3673}
3674
3675/* If X is a nontrivial arithmetic operation on an argument
3676   for which a constant value can be determined, return
3677   the result of operating on that value, as a constant.
3678   Otherwise, return X, possibly with one or more operands
3679   modified by recursive calls to this function.
3680
3681   If X is a register whose contents are known, we do NOT
3682   return those contents here.  equiv_constant is called to
3683   perform that task.
3684
3685   INSN is the insn that we may be modifying.  If it is 0, make a copy
3686   of X before modifying it.  */
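/* For example, if (reg:SI 5) is known equivalent to (const_int 4),
   fold_rtx turns (plus:SI (reg:SI 5) (const_int 3)) into (const_int 7),
   but (reg:SI 5) on its own is returned unchanged, per the comment
   above.  (Illustrative register number.)  */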
3687
3688static rtx
3689fold_rtx (rtx x, rtx insn)
3690{
3691  enum rtx_code code;
3692  enum machine_mode mode;
3693  const char *fmt;
3694  int i;
3695  rtx new = 0;
3696  int copied = 0;
3697  int must_swap = 0;
3698
3699  /* Folded equivalents of first two operands of X.  */
3700  rtx folded_arg0;
3701  rtx folded_arg1;
3702
3703  /* Constant equivalents of first three operands of X;
3704     0 when no such equivalent is known.  */
3705  rtx const_arg0;
3706  rtx const_arg1;
3707  rtx const_arg2;
3708
3709  /* The mode of the first operand of X.  We need this for sign and zero
3710     extends.  */
3711  enum machine_mode mode_arg0;
3712
3713  if (x == 0)
3714    return x;
3715
3716  mode = GET_MODE (x);
3717  code = GET_CODE (x);
3718  switch (code)
3719    {
3720    case CONST:
3721    case CONST_INT:
3722    case CONST_DOUBLE:
3723    case CONST_VECTOR:
3724    case SYMBOL_REF:
3725    case LABEL_REF:
3726    case REG:
3727    case PC:
3728      /* No use simplifying an EXPR_LIST,
3729	 since EXPR_LISTs are used only for lists of args
3730	 in a function call's REG_EQUAL note.  */
3731    case EXPR_LIST:
3732      return x;
3733
3734#ifdef HAVE_cc0
3735    case CC0:
3736      return prev_insn_cc0;
3737#endif
3738
3739    case SUBREG:
3740      return fold_rtx_subreg (x, insn);
3741
3742    case NOT:
3743    case NEG:
3744      /* If we have (NOT Y), see if Y is known to be (NOT Z).
3745	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3746      new = lookup_as_function (XEXP (x, 0), code);
3747      if (new)
3748	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3749      break;
3750
3751    case MEM:
3752      return fold_rtx_mem (x, insn);
3753
3754#ifdef NO_FUNCTION_CSE
3755    case CALL:
3756      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3757	return x;
3758      break;
3759#endif
3760
3761    case ASM_OPERANDS:
3762      if (insn)
3763	{
3764	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3765	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3766			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3767	}
3768      break;
3769
3770    default:
3771      break;
3772    }
3773
3774  const_arg0 = 0;
3775  const_arg1 = 0;
3776  const_arg2 = 0;
3777  mode_arg0 = VOIDmode;
3778
3779  /* Try folding our operands.
3780     Then see which ones have constant values known.  */
3781
3782  fmt = GET_RTX_FORMAT (code);
3783  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3784    if (fmt[i] == 'e')
3785      {
3786	rtx arg = XEXP (x, i);
3787	rtx folded_arg = arg, const_arg = 0;
3788	enum machine_mode mode_arg = GET_MODE (arg);
3789	rtx cheap_arg, expensive_arg;
3790	rtx replacements[2];
3791	int j;
3792	int old_cost = COST_IN (XEXP (x, i), code);
3793
3794	/* Most arguments are cheap, so handle them specially.  */
3795	switch (GET_CODE (arg))
3796	  {
3797	  case REG:
3798	    /* This is the same as calling equiv_constant; it is duplicated
3799	       here for speed.  */
3800	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3801	      {
3802		int arg_q = REG_QTY (REGNO (arg));
3803		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3804
3805		if (arg_ent->const_rtx != NULL_RTX
3806		    && !REG_P (arg_ent->const_rtx)
3807		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3808		  const_arg
3809		    = gen_lowpart (GET_MODE (arg),
3810					       arg_ent->const_rtx);
3811	      }
3812	    break;
3813
3814	  case CONST:
3815	  case CONST_INT:
3816	  case SYMBOL_REF:
3817	  case LABEL_REF:
3818	  case CONST_DOUBLE:
3819	  case CONST_VECTOR:
3820	    const_arg = arg;
3821	    break;
3822
3823#ifdef HAVE_cc0
3824	  case CC0:
3825	    folded_arg = prev_insn_cc0;
3826	    mode_arg = prev_insn_cc0_mode;
3827	    const_arg = equiv_constant (folded_arg);
3828	    break;
3829#endif
3830
3831	  default:
3832	    folded_arg = fold_rtx (arg, insn);
3833	    const_arg = equiv_constant (folded_arg);
3834	  }
3835
3836	/* For the first three operands, see if the operand
3837	   is constant or equivalent to a constant.  */
3838	switch (i)
3839	  {
3840	  case 0:
3841	    folded_arg0 = folded_arg;
3842	    const_arg0 = const_arg;
3843	    mode_arg0 = mode_arg;
3844	    break;
3845	  case 1:
3846	    folded_arg1 = folded_arg;
3847	    const_arg1 = const_arg;
3848	    break;
3849	  case 2:
3850	    const_arg2 = const_arg;
3851	    break;
3852	  }
3853
3854	/* Pick the least expensive of the folded argument and an
3855	   equivalent constant argument.  */
3856	if (const_arg == 0 || const_arg == folded_arg
3857	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3858	  cheap_arg = folded_arg, expensive_arg = const_arg;
3859	else
3860	  cheap_arg = const_arg, expensive_arg = folded_arg;
3861
3862	/* Try to replace the operand with the cheapest of the two
3863	   possibilities.  If it doesn't work and this is either of the first
3864	   two operands of a commutative operation, try swapping them.
3865	   If THAT fails, try the more expensive, provided it is cheaper
3866	   than what is already there.  */
3867
3868	if (cheap_arg == XEXP (x, i))
3869	  continue;
3870
3871	if (insn == 0 && ! copied)
3872	  {
3873	    x = copy_rtx (x);
3874	    copied = 1;
3875	  }
3876
3877	/* Order the replacements from cheapest to most expensive.  */
3878	replacements[0] = cheap_arg;
3879	replacements[1] = expensive_arg;
3880
3881	for (j = 0; j < 2 && replacements[j]; j++)
3882	  {
3883	    int new_cost = COST_IN (replacements[j], code);
3884
3885	    /* Stop if what existed before was cheaper.  Prefer constants
3886	       in the case of a tie.  */
3887	    if (new_cost > old_cost
3888		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3889	      break;
3890
3891	    /* It's not safe to substitute the operand of a conversion
3892	       operator with a constant, as the conversion's identity
3893	       depends upon the mode of its operand.  This optimization
3894	       is handled by the call to simplify_unary_operation.  */
3895	    if (GET_RTX_CLASS (code) == RTX_UNARY
3896		&& GET_MODE (replacements[j]) != mode_arg0
3897		&& (code == ZERO_EXTEND
3898		    || code == SIGN_EXTEND
3899		    || code == TRUNCATE
3900		    || code == FLOAT_TRUNCATE
3901		    || code == FLOAT_EXTEND
3902		    || code == FLOAT
3903		    || code == FIX
3904		    || code == UNSIGNED_FLOAT
3905		    || code == UNSIGNED_FIX))
3906	      continue;
3907
3908	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3909	      break;
3910
3911	    if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3912		|| GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3913	      {
3914		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3915		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3916
3917		if (apply_change_group ())
3918		  {
3919		    /* Swap them back for now and set MUST_SWAP; the
3920		       canonicalization code below redoes the swap permanently.  */
3921		    rtx tem;
3922
3923		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3924				       XEXP (x, 1) = tem;
3925		    must_swap = 1;
3926		    break;
3927		  }
3928	      }
3929	  }
3930      }
3931
3932    else
3933      {
3934	if (fmt[i] == 'E')
3935	  /* Don't try to fold inside of a vector of expressions.
3936	     Doing nothing is harmless.  */
3937	  {;}
3938      }
3939
3940  /* If a commutative operation, place a constant integer as the second
3941     operand unless the first operand is also a constant integer.  Otherwise,
3942     place any constant second unless the first operand is also a constant.  */
3943
3944  if (COMMUTATIVE_P (x))
3945    {
3946      if (must_swap
3947	  || swap_commutative_operands_p (const_arg0 ? const_arg0
3948						     : XEXP (x, 0),
3949					  const_arg1 ? const_arg1
3950						     : XEXP (x, 1)))
3951	{
3952	  rtx tem = XEXP (x, 0);
3953
3954	  if (insn == 0 && ! copied)
3955	    {
3956	      x = copy_rtx (x);
3957	      copied = 1;
3958	    }
3959
3960	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3961	  validate_change (insn, &XEXP (x, 1), tem, 1);
3962	  if (apply_change_group ())
3963	    {
3964	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3965	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3966	    }
3967	}
3968    }
3969
3970  /* If X is an arithmetic operation, see if we can simplify it.  */
3971
3972  switch (GET_RTX_CLASS (code))
3973    {
3974    case RTX_UNARY:
3975      {
3976	int is_const = 0;
3977
3978	/* We can't simplify extension ops unless we know the
3979	   original mode.  */
3980	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3981	    && mode_arg0 == VOIDmode)
3982	  break;
3983
3984	/* If we had a CONST, strip it off and put it back later if we
3985	   fold.  */
3986	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3987	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3988
3989	new = simplify_unary_operation (code, mode,
3990					const_arg0 ? const_arg0 : folded_arg0,
3991					mode_arg0);
3992	/* NEG of PLUS could be converted into MINUS, but that causes
3993	   expressions of the form
3994	   (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3995	   which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3996	   FIXME: those ports should be fixed.  */
3997	if (new != 0 && is_const
3998	    && GET_CODE (new) == PLUS
3999	    && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
4000		|| GET_CODE (XEXP (new, 0)) == LABEL_REF)
4001	    && GET_CODE (XEXP (new, 1)) == CONST_INT)
4002	  new = gen_rtx_CONST (mode, new);
4003      }
4004      break;
4005
4006    case RTX_COMPARE:
4007    case RTX_COMM_COMPARE:
4008      /* See what items are actually being compared and set FOLDED_ARG[01]
4009	 to those values and CODE to the actual comparison code.  If any are
4010	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
4011	 do anything if both operands are already known to be constant.  */
4012
4013      /* ??? Vector mode comparisons are not supported yet.  */
4014      if (VECTOR_MODE_P (mode))
4015	break;
4016
4017      if (const_arg0 == 0 || const_arg1 == 0)
4018	{
4019	  struct table_elt *p0, *p1;
4020	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4021	  enum machine_mode mode_arg1;
4022
4023#ifdef FLOAT_STORE_FLAG_VALUE
4024	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4025	    {
4026	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4027			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4028	      false_rtx = CONST0_RTX (mode);
4029	    }
4030#endif
4031
4032	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
4033				       &mode_arg0, &mode_arg1);
4034
4035	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
4036	     what kinds of things are being compared, so we can't do
4037	     anything with this comparison.  */
4038
4039	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
4040	    break;
4041
4042	  const_arg0 = equiv_constant (folded_arg0);
4043	  const_arg1 = equiv_constant (folded_arg1);
4044
4045	  /* If we do not now have two constants being compared, see
4046	     if we can nevertheless deduce some things about the
4047	     comparison.  */
4048	  if (const_arg0 == 0 || const_arg1 == 0)
4049	    {
4050	      /* Some addresses are known to be nonzero.  We don't know
4051		 their sign, but equality comparisons are known.  */
4052	      if (const_arg1 == const0_rtx
4053		  && nonzero_address_p (folded_arg0))
4054		{
4055		  if (code == EQ)
4056		    return false_rtx;
4057		  else if (code == NE)
4058		    return true_rtx;
4059		}
4060
4061	      /* See if the two operands are the same.  */
4062
4063	      if (folded_arg0 == folded_arg1
4064		  || (REG_P (folded_arg0)
4065		      && REG_P (folded_arg1)
4066		      && (REG_QTY (REGNO (folded_arg0))
4067			  == REG_QTY (REGNO (folded_arg1))))
4068		  || ((p0 = lookup (folded_arg0,
4069				    SAFE_HASH (folded_arg0, mode_arg0),
4070				    mode_arg0))
4071		      && (p1 = lookup (folded_arg1,
4072				       SAFE_HASH (folded_arg1, mode_arg0),
4073				       mode_arg0))
4074		      && p0->first_same_value == p1->first_same_value))
4075		{
4076		  /* Sadly two equal NaNs are not equivalent.  */
4077		  if (!HONOR_NANS (mode_arg0))
4078		    return ((code == EQ || code == LE || code == GE
4079			     || code == LEU || code == GEU || code == UNEQ
4080			     || code == UNLE || code == UNGE
4081			     || code == ORDERED)
4082			    ? true_rtx : false_rtx);
4083		  /* Handle the FP comparisons we can still resolve.  */
4084		  if (code == UNEQ || code == UNLE || code == UNGE)
4085		    return true_rtx;
4086		  if (code == LTGT || code == LT || code == GT)
4087		    return false_rtx;
4088		}
4089
4090	      /* If FOLDED_ARG0 is a register, see if the comparison we are
4091		 doing now is either the same as we did before or the reverse
4092		 (we only check the reverse if not floating-point).  */
4093	      else if (REG_P (folded_arg0))
4094		{
4095		  int qty = REG_QTY (REGNO (folded_arg0));
4096
4097		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4098		    {
4099		      struct qty_table_elem *ent = &qty_table[qty];
4100
4101		      if ((comparison_dominates_p (ent->comparison_code, code)
4102			   || (! FLOAT_MODE_P (mode_arg0)
4103			       && comparison_dominates_p (ent->comparison_code,
4104						          reverse_condition (code))))
4105			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
4106			      || (const_arg1
4107				  && rtx_equal_p (ent->comparison_const,
4108						  const_arg1))
4109			      || (REG_P (folded_arg1)
4110				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4111			return (comparison_dominates_p (ent->comparison_code, code)
4112				? true_rtx : false_rtx);
4113		    }
4114		}
4115	    }
4116	}
4117
4118      /* If we are comparing against zero, see if the first operand is
4119	 equivalent to an IOR with a constant.  If so, we may be able to
4120	 determine the result of this comparison.  */
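      /* E.g. if folded_arg0 is known equivalent to (ior:SI (reg:SI 6)
	 (const_int 4)), the value has bit 2 set and so is nonzero: EQ
	 folds to false and NE to true.  If instead the known IOR
	 constant has the sign bit set, the value is negative, resolving
	 LT/LE to true and GT/GE to false.  (Illustrative.)  */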
4121
4122      if (const_arg1 == const0_rtx)
4123	{
4124	  rtx y = lookup_as_function (folded_arg0, IOR);
4125	  rtx inner_const;
4126
4127	  if (y != 0
4128	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4129	      && GET_CODE (inner_const) == CONST_INT
4130	      && INTVAL (inner_const) != 0)
4131	    {
4132	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4133	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4134			      && (INTVAL (inner_const)
4135				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4136	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4137
4138#ifdef FLOAT_STORE_FLAG_VALUE
4139	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4140		{
4141		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4142			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4143		  false_rtx = CONST0_RTX (mode);
4144		}
4145#endif
4146
4147	      switch (code)
4148		{
4149		case EQ:
4150		  return false_rtx;
4151		case NE:
4152		  return true_rtx;
4153		case LT:  case LE:
4154		  if (has_sign)
4155		    return true_rtx;
4156		  break;
4157		case GT:  case GE:
4158		  if (has_sign)
4159		    return false_rtx;
4160		  break;
4161		default:
4162		  break;
4163		}
4164	    }
4165	}
4166
4167      {
4168	rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4169	rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4170        new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4171      }
4172      break;
4173
4174    case RTX_BIN_ARITH:
4175    case RTX_COMM_ARITH:
4176      switch (code)
4177	{
4178	case PLUS:
4179	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4180	     with that LABEL_REF as its second operand.  If so, the result is
4181	     the first operand of that MINUS.  This handles switches with an
4182	     ADDR_DIFF_VEC table.  */
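	  /* I.e. (plus (minus (label_ref L2) (label_ref L1))
	     (label_ref L1)) folds to (label_ref L2).  (Illustrative
	     labels.)  */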
4183	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4184	    {
4185	      rtx y
4186		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4187		: lookup_as_function (folded_arg0, MINUS);
4188
4189	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4190		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4191		return XEXP (y, 0);
4192
4193	      /* Now try for a CONST of a MINUS like the above.  */
4194	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4195			: lookup_as_function (folded_arg0, CONST))) != 0
4196		  && GET_CODE (XEXP (y, 0)) == MINUS
4197		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4198		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4199		return XEXP (XEXP (y, 0), 0);
4200	    }
4201
4202	  /* Likewise if the operands are in the other order.  */
4203	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4204	    {
4205	      rtx y
4206		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4207		: lookup_as_function (folded_arg1, MINUS);
4208
4209	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4210		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4211		return XEXP (y, 0);
4212
4213	      /* Now try for a CONST of a MINUS like the above.  */
4214	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4215			: lookup_as_function (folded_arg1, CONST))) != 0
4216		  && GET_CODE (XEXP (y, 0)) == MINUS
4217		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4218		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4219		return XEXP (XEXP (y, 0), 0);
4220	    }
4221
4222	  /* If second operand is a register equivalent to a negative
4223	     CONST_INT, see if we can find a register equivalent to the
4224	     positive constant.  Make a MINUS if so.  Don't do this for
4225	     a non-negative constant since we might then alternate between
4226	     choosing positive and negative constants.  Having the positive
4227	     constant previously-used is the more common case.  Be sure
4228	     the resulting constant is non-negative; if const_arg1 were
4229	     the smallest negative number this would overflow: depending
4230	     on the mode, this would either just be the same value (and
4231	     hence not save anything) or be incorrect.  */
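	  /* E.g. for (plus:SI (reg:SI 5) (reg:SI 6)) where (reg:SI 6)
	     is known to hold (const_int -4): if some register is known
	     to hold (const_int 4), the sum can become
	     (minus:SI (reg:SI 5) (reg:SI N)).  (Illustrative register
	     numbers.)  */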
4232	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4233	      && INTVAL (const_arg1) < 0
4234	      /* This used to test
4235
4236	         -INTVAL (const_arg1) >= 0
4237
4238		 But the Sun V5.0 compilers miscompiled that test.  So
4239		 instead we test for the problematic value in a more direct
4240		 manner and hope the Sun compilers get it correct.  */
4241	      && INTVAL (const_arg1) !=
4242	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4243	      && REG_P (folded_arg1))
4244	    {
4245	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4246	      struct table_elt *p
4247		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
4248
4249	      if (p)
4250		for (p = p->first_same_value; p; p = p->next_same_value)
4251		  if (REG_P (p->exp))
4252		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4253						canon_reg (p->exp, NULL_RTX));
4254	    }
4255	  goto from_plus;
4256
4257	case MINUS:
4258	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4259	     If so, produce (PLUS Z C2-C).  */
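	  /* E.g. if Y is known to be (plus:SI (reg:SI 7) (const_int 10)),
	     then (minus:SI Y (const_int 4)) folds to
	     (plus:SI (reg:SI 7) (const_int 6)).  (Illustrative.)  */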
4260	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4261	    {
4262	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4263	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4264		return fold_rtx (plus_constant (copy_rtx (y),
4265						-INTVAL (const_arg1)),
4266				 NULL_RTX);
4267	    }
4268
4269	  /* Fall through.  */
4270
4271	from_plus:
4272	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4273	case IOR:     case AND:       case XOR:
4274	case MULT:
4275	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4276	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4277	     is known to be of similar form, we may be able to replace the
4278	     operation with a combined operation.  This may eliminate the
4279	     intermediate operation if every use is simplified in this way.
4280	     Note that the similar optimization done by combine.c only works
4281	     if the intermediate operation's result has only one reference.  */
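	  /* E.g. if (reg:SI 9) is known to hold (ashift:SI (reg:SI 8)
	     (const_int 2)), then (ashift:SI (reg:SI 9) (const_int 3))
	     folds to (ashift:SI (reg:SI 8) (const_int 5)): the shift
	     counts are combined with PLUS.  (Illustrative register
	     numbers.)  */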
4282
4283	  if (REG_P (folded_arg0)
4284	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4285	    {
4286	      int is_shift
4287		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4288	      rtx y, inner_const, new_const;
4289	      enum rtx_code associate_code;
4290
4291	      y = lookup_as_function (folded_arg0, code);
4292	      if (y == 0)
4293		break;
4294
4295	      /* If we have compiled a statement like
4296		 "if (x == (x & mask1))", and now are looking at
4297		 "x & mask2", we will have a case where the first operand
4298		 of Y is the same as our first operand.  Unless we detect
4299		 this case, an infinite loop will result.  */
4300	      if (XEXP (y, 0) == folded_arg0)
4301		break;
4302
4303	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
4304	      if (!inner_const || GET_CODE (inner_const) != CONST_INT)
4305		break;
4306
4307	      /* Don't associate these operations if they are a PLUS with the
4308		 same constant and it is a power of two.  These might be doable
4309		 with a pre- or post-increment.  Similarly for two subtracts of
4310		 identical powers of two with post decrement.  */
4311
4312	      if (code == PLUS && const_arg1 == inner_const
4313		  && ((HAVE_PRE_INCREMENT
4314			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4315		      || (HAVE_POST_INCREMENT
4316			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4317		      || (HAVE_PRE_DECREMENT
4318			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4319		      || (HAVE_POST_DECREMENT
4320			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4321		break;
4322
4323	      /* Compute the code used to compose the constants.  For example,
4324		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
4325
4326	      associate_code = (is_shift || code == MINUS ? PLUS : code);
4327
4328	      new_const = simplify_binary_operation (associate_code, mode,
4329						     const_arg1, inner_const);
4330
4331	      if (new_const == 0)
4332		break;
4333
4334	      /* If we are associating shift operations, don't let this
4335		 produce a shift of the size of the object or larger.
4336		 This could occur when we follow a sign-extend by a right
4337		 shift on a machine that does a sign-extend as a pair
4338		 of shifts.  */
4339
4340	      if (is_shift && GET_CODE (new_const) == CONST_INT
4341		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4342		{
4343		  /* As an exception, we can turn an ASHIFTRT of this
4344		     form into a shift of the number of bits - 1.  */
4345		  if (code == ASHIFTRT)
4346		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4347		  else
4348		    break;
4349		}
4350
4351	      y = copy_rtx (XEXP (y, 0));
4352
4353	      /* If Y contains our first operand (the most common way this
4354		 can happen is if Y is a MEM), we would go into an infinite
4355		 loop if we tried to fold it.  So don't in that case.  */
4356
4357	      if (! reg_mentioned_p (folded_arg0, y))
4358		y = fold_rtx (y, insn);
4359
4360	      return simplify_gen_binary (code, mode, y, new_const);
4361	    }
4362	  break;
4363
4364	case DIV:       case UDIV:
4365	  /* ??? The associative optimization performed immediately above is
4366	     also possible for DIV and UDIV using associate_code of MULT.
4367	     However, we would need extra code to verify that the
4368	     multiplication does not overflow, that is, there is no overflow
4369	     in the calculation of new_const.  */
4370	  break;
4371
4372	default:
4373	  break;
4374	}
4375
4376      new = simplify_binary_operation (code, mode,
4377				       const_arg0 ? const_arg0 : folded_arg0,
4378				       const_arg1 ? const_arg1 : folded_arg1);
4379      break;
4380
4381    case RTX_OBJ:
4382      /* (lo_sum (high X) X) is simply X.  */
4383      if (code == LO_SUM && const_arg0 != 0
4384	  && GET_CODE (const_arg0) == HIGH
4385	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4386	return const_arg1;
4387      break;
4388
4389    case RTX_TERNARY:
4390    case RTX_BITFIELD_OPS:
4391      new = simplify_ternary_operation (code, mode, mode_arg0,
4392					const_arg0 ? const_arg0 : folded_arg0,
4393					const_arg1 ? const_arg1 : folded_arg1,
4394					const_arg2 ? const_arg2 : XEXP (x, 2));
4395      break;
4396
4397    default:
4398      break;
4399    }
4400
4401  return new ? new : x;
4402}
4403
4404/* Return a constant value currently equivalent to X.
4405   Return 0 if we don't know one.  */
4406
4407static rtx
4408equiv_constant (rtx x)
4409{
4410  if (REG_P (x)
4411      && REGNO_QTY_VALID_P (REGNO (x)))
4412    {
4413      int x_q = REG_QTY (REGNO (x));
4414      struct qty_table_elem *x_ent = &qty_table[x_q];
4415
4416      if (x_ent->const_rtx)
4417	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4418    }
4419
4420  if (x == 0 || CONSTANT_P (x))
4421    return x;
4422
4423  /* If X is a MEM, try to fold it outside the context of any insn to see if
4424     it might be equivalent to a constant.  That handles the case where it
4425     is a constant-pool reference.  Then try to look it up in the hash table
4426     in case it is something whose value we have seen before.  */
4427
4428  if (MEM_P (x))
4429    {
4430      struct table_elt *elt;
4431
4432      x = fold_rtx (x, NULL_RTX);
4433      if (CONSTANT_P (x))
4434	return x;
4435
4436      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4437      if (elt == 0)
4438	return 0;
4439
4440      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4441	if (elt->is_const && CONSTANT_P (elt->exp))
4442	  return elt->exp;
4443    }
4444
4445  return 0;
4446}
4447
4448/* Given INSN, a jump insn, TAKEN indicates if we are following the
4449   "taken" branch.  It will be zero if not.
4450
4451   In certain cases, this can cause us to add an equivalence.  For example,
4452   if we are following the taken case of
4453	if (i == 2)
4454   we can add the fact that `i' and `2' are now equivalent.
4455
4456   In any case, we can record that this comparison was passed.  If the same
4457   comparison is seen later, we will know its value.  */
4458
4459static void
4460record_jump_equiv (rtx insn, int taken)
4461{
4462  int cond_known_true;
4463  rtx op0, op1;
4464  rtx set;
4465  enum machine_mode mode, mode0, mode1;
4466  int reversed_nonequality = 0;
4467  enum rtx_code code;
4468
4469  /* Ensure this is the right kind of insn.  */
4470  if (! any_condjump_p (insn))
4471    return;
4472  set = pc_set (insn);
4473
4474  /* See if this jump condition is known true or false.  */
4475  if (taken)
4476    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4477  else
4478    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4479
4480  /* Get the type of comparison being done and the operands being compared.
4481     If we had to reverse a non-equality condition, record that fact so we
4482     know that it isn't valid for floating-point.  */
4483  code = GET_CODE (XEXP (SET_SRC (set), 0));
4484  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4485  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4486
4487  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4488  if (! cond_known_true)
4489    {
4490      code = reversed_comparison_code_parts (code, op0, op1, insn);
4491
4492      /* Don't remember if we can't find the inverse.  */
4493      if (code == UNKNOWN)
4494	return;
4495    }
4496
4497  /* The mode is the mode of the non-constant.  */
4498  mode = mode0;
4499  if (mode1 != VOIDmode)
4500    mode = mode1;
4501
4502  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4503}
4504
4505/* Yet another form of subreg creation.  In this case, we want something in
4506   MODE, and we should assume OP has MODE iff it is naturally modeless.  */
4507
4508static rtx
4509record_jump_cond_subreg (enum machine_mode mode, rtx op)
4510{
4511  enum machine_mode op_mode = GET_MODE (op);
4512  if (op_mode == mode || op_mode == VOIDmode)
4513    return op;
4514  return lowpart_subreg (mode, op, op_mode);
4515}
4516
4517/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4518   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4519   Make any useful entries we can with that information.  Called from
4520   above function and called recursively.  */
4521
4522static void
4523record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4524		  rtx op1, int reversed_nonequality)
4525{
4526  unsigned op0_hash, op1_hash;
4527  int op0_in_memory, op1_in_memory;
4528  struct table_elt *op0_elt, *op1_elt;
4529
4530  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4531     we know that they are also equal in the smaller mode (this is also
4532     true for all smaller modes whether or not there is a SUBREG, but
4533     is not worth testing for with no SUBREG).  */
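  /* E.g. knowing (subreg:SI (reg:HI 3) 0) == (reg:SI 4) lets us also
     record (reg:HI 3) == (subreg:HI (reg:SI 4) 0), since the low 16
     bits must agree.  (Illustrative modes and register numbers.)  */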
4534
4535  /* Note that GET_MODE (op0) may not equal MODE.  */
4536  if (code == EQ && GET_CODE (op0) == SUBREG
4537      && (GET_MODE_SIZE (GET_MODE (op0))
4538	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4539    {
4540      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4541      rtx tem = record_jump_cond_subreg (inner_mode, op1);
4542      if (tem)
4543	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4544			  reversed_nonequality);
4545    }
4546
4547  if (code == EQ && GET_CODE (op1) == SUBREG
4548      && (GET_MODE_SIZE (GET_MODE (op1))
4549	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4550    {
4551      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4552      rtx tem = record_jump_cond_subreg (inner_mode, op0);
4553      if (tem)
4554	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4555			  reversed_nonequality);
4556    }
4557
4558  /* Similarly, if this is an NE comparison, and either is a SUBREG
4559     making a smaller mode, we know the whole thing is also NE.  */
4560
4561  /* Note that GET_MODE (op0) may not equal MODE;
4562     if we test MODE instead, we can get an infinite recursion
4563     alternating between two modes each wider than MODE.  */
4564
4565  if (code == NE && GET_CODE (op0) == SUBREG
4566      && subreg_lowpart_p (op0)
4567      && (GET_MODE_SIZE (GET_MODE (op0))
4568	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4569    {
4570      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4571      rtx tem = record_jump_cond_subreg (inner_mode, op1);
4572      if (tem)
4573	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4574			  reversed_nonequality);
4575    }
4576
4577  if (code == NE && GET_CODE (op1) == SUBREG
4578      && subreg_lowpart_p (op1)
4579      && (GET_MODE_SIZE (GET_MODE (op1))
4580	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4581    {
4582      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4583      rtx tem = record_jump_cond_subreg (inner_mode, op0);
4584      if (tem)
4585	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4586			  reversed_nonequality);
4587    }
4588
4589  /* Hash both operands.  */
4590
4591  do_not_record = 0;
4592  hash_arg_in_memory = 0;
4593  op0_hash = HASH (op0, mode);
4594  op0_in_memory = hash_arg_in_memory;
4595
4596  if (do_not_record)
4597    return;
4598
4599  do_not_record = 0;
4600  hash_arg_in_memory = 0;
4601  op1_hash = HASH (op1, mode);
4602  op1_in_memory = hash_arg_in_memory;
4603
4604  if (do_not_record)
4605    return;
4606
4607  /* Look up both operands.  */
4608  op0_elt = lookup (op0, op0_hash, mode);
4609  op1_elt = lookup (op1, op1_hash, mode);
4610
4611  /* If both operands are already equivalent or if they are not in the
4612     table but are identical, do nothing.  */
4613  if ((op0_elt != 0 && op1_elt != 0
4614       && op0_elt->first_same_value == op1_elt->first_same_value)
4615      || op0 == op1 || rtx_equal_p (op0, op1))
4616    return;
4617
4618  /* If we aren't setting two things equal all we can do is save this
4619     comparison.   Similarly if this is floating-point.  In the latter
4620     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4621     If we record the equality, we might inadvertently delete code
4622     whose intent was to change -0 to +0.  */
4623
4624  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4625    {
4626      struct qty_table_elem *ent;
4627      int qty;
4628
4629      /* If we reversed a floating-point comparison, if OP0 is not a
4630	 register, or if OP1 is neither a register nor a constant, we can't
4631	 do anything.  */
4632
4633      if (!REG_P (op1))
4634	op1 = equiv_constant (op1);
4635
4636      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4637	  || !REG_P (op0) || op1 == 0)
4638	return;
4639
4640      /* Put OP0 in the hash table if it isn't already.  This gives it a
4641	 new quantity number.  */
4642      if (op0_elt == 0)
4643	{
4644	  if (insert_regs (op0, NULL, 0))
4645	    {
4646	      rehash_using_reg (op0);
4647	      op0_hash = HASH (op0, mode);
4648
4649	      /* If OP0 is contained in OP1, this changes its hash code
4650		 as well.  Faster to rehash than to check, except
4651		 for the simple case of a constant.  */
4652	      if (! CONSTANT_P (op1))
4653		op1_hash = HASH (op1, mode);
4654	    }
4655
4656	  op0_elt = insert (op0, NULL, op0_hash, mode);
4657	  op0_elt->in_memory = op0_in_memory;
4658	}
4659
4660      qty = REG_QTY (REGNO (op0));
4661      ent = &qty_table[qty];
4662
4663      ent->comparison_code = code;
4664      if (REG_P (op1))
4665	{
4666	  /* Look it up again--in case op0 and op1 are the same.  */
4667	  op1_elt = lookup (op1, op1_hash, mode);
4668
4669	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4670	  if (op1_elt == 0)
4671	    {
4672	      if (insert_regs (op1, NULL, 0))
4673		{
4674		  rehash_using_reg (op1);
4675		  op1_hash = HASH (op1, mode);
4676		}
4677
4678	      op1_elt = insert (op1, NULL, op1_hash, mode);
4679	      op1_elt->in_memory = op1_in_memory;
4680	    }
4681
4682	  ent->comparison_const = NULL_RTX;
4683	  ent->comparison_qty = REG_QTY (REGNO (op1));
4684	}
4685      else
4686	{
4687	  ent->comparison_const = op1;
4688	  ent->comparison_qty = -1;
4689	}
4690
4691      return;
4692    }
4693
4694  /* If either side is still missing an equivalence, make it now,
4695     then merge the equivalences.  */
4696
4697  if (op0_elt == 0)
4698    {
4699      if (insert_regs (op0, NULL, 0))
4700	{
4701	  rehash_using_reg (op0);
4702	  op0_hash = HASH (op0, mode);
4703	}
4704
4705      op0_elt = insert (op0, NULL, op0_hash, mode);
4706      op0_elt->in_memory = op0_in_memory;
4707    }
4708
4709  if (op1_elt == 0)
4710    {
4711      if (insert_regs (op1, NULL, 0))
4712	{
4713	  rehash_using_reg (op1);
4714	  op1_hash = HASH (op1, mode);
4715	}
4716
4717      op1_elt = insert (op1, NULL, op1_hash, mode);
4718      op1_elt->in_memory = op1_in_memory;
4719    }
4720
4721  merge_equiv_classes (op0_elt, op1_elt);
4722}
4723
4724/* CSE processing for one instruction.
4725   First simplify sources and addresses of all assignments
4726   in the instruction, using previously-computed equivalent values.
4727   Then install the new sources and destinations in the table
4728   of available values.
4729
4730   If LIBCALL_INSN is nonzero, don't record any equivalence made in
4731   the insn.  This means that INSN is inside a libcall block.  In this
4732   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4733
4734/* Data on one SET contained in the instruction.  */
4735
4736struct set
4737{
4738  /* The SET rtx itself.  */
4739  rtx rtl;
4740  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4741  rtx src;
4742  /* The hash-table element for the SET_SRC of the SET.  */
4743  struct table_elt *src_elt;
4744  /* Hash value for the SET_SRC.  */
4745  unsigned src_hash;
4746  /* Hash value for the SET_DEST.  */
4747  unsigned dest_hash;
4748  /* The SET_DEST, with SUBREG, etc., stripped.  */
4749  rtx inner_dest;
4750  /* Nonzero if the SET_SRC is in memory.  */
4751  char src_in_memory;
4752  /* Nonzero if the SET_SRC contains something
4753     whose value cannot be predicted and understood.  */
4754  char src_volatile;
4755  /* Original machine mode, in case it becomes a CONST_INT.
4756     The size of this field should match the size of the mode
4757     field of struct rtx_def (see rtl.h).  */
4758  ENUM_BITFIELD(machine_mode) mode : 8;
4759  /* A constant equivalent for SET_SRC, if any.  */
4760  rtx src_const;
4761  /* Original SET_SRC value used for libcall notes.  */
4762  rtx orig_src;
4763  /* Hash value of constant equivalent for SET_SRC.  */
4764  unsigned src_const_hash;
4765  /* Table entry for constant equivalent for SET_SRC, if any.  */
4766  struct table_elt *src_const_elt;
4767  /* Table entry for the destination address.  */
4768  struct table_elt *dest_addr_elt;
4769};
4770
4771static void
4772cse_insn (rtx insn, rtx libcall_insn)
4773{
4774  rtx x = PATTERN (insn);
4775  int i;
4776  rtx tem;
4777  int n_sets = 0;
4778
4779#ifdef HAVE_cc0
4780  /* Records what this insn does to set CC0.  */
4781  rtx this_insn_cc0 = 0;
4782  enum machine_mode this_insn_cc0_mode = VOIDmode;
4783#endif
4784
4785  rtx src_eqv = 0;
4786  struct table_elt *src_eqv_elt = 0;
4787  int src_eqv_volatile = 0;
4788  int src_eqv_in_memory = 0;
4789  unsigned src_eqv_hash = 0;
4790
4791  struct set *sets = (struct set *) 0;
4792
4793  this_insn = insn;
4794
4795  /* Find all the SETs and CLOBBERs in this instruction.
4796     Record all the SETs in the array `sets' and count them.
4797     Also determine whether there is a CLOBBER that invalidates
4798     all memory references, or all references at varying addresses.  */
4799
4800  if (CALL_P (insn))
4801    {
4802      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4803	{
4804	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4805	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4806	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4807	}
4808    }
4809
4810  if (GET_CODE (x) == SET)
4811    {
4812      sets = alloca (sizeof (struct set));
4813      sets[0].rtl = x;
4814
4815      /* Ignore SETs that are unconditional jumps.
4816	 They never need cse processing, so this does not hurt.
4817	 The reason is not efficiency but rather
4818	 so that we can test at the end for instructions
4819	 that have been simplified to unconditional jumps
4820	 and not be misled by unchanged instructions
4821	 that were unconditional jumps to begin with.  */
4822      if (SET_DEST (x) == pc_rtx
4823	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4824	;
4825
4826      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4827	 The hard function value register is used only once, to copy to
4828	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4829	 Ensure we invalidate the destination register.  On the 80386 no
4830	 other code would invalidate it since it is a fixed_reg.
4831	 We need not check the return of apply_change_group; see canon_reg.  */
4832
4833      else if (GET_CODE (SET_SRC (x)) == CALL)
4834	{
4835	  canon_reg (SET_SRC (x), insn);
4836	  apply_change_group ();
4837	  fold_rtx (SET_SRC (x), insn);
4838	  invalidate (SET_DEST (x), VOIDmode);
4839	}
4840      else
4841	n_sets = 1;
4842    }
4843  else if (GET_CODE (x) == PARALLEL)
4844    {
4845      int lim = XVECLEN (x, 0);
4846
4847      sets = alloca (lim * sizeof (struct set));
4848
4849      /* Find all regs explicitly clobbered in this insn,
4850	 and ensure they are not replaced with any other regs
4851	 elsewhere in this insn.
4852	 When a reg that is clobbered is also used for input,
4853	 we should presume that that is for a reason,
4854	 and we should not substitute some other register
4855	 which is not supposed to be clobbered.
4856	 Therefore, this loop cannot be merged into the one below
4857	 because a CALL may precede a CLOBBER and refer to the
4858	 value clobbered.  We must not let a canonicalization do
4859	 anything in that case.  */
4860      for (i = 0; i < lim; i++)
4861	{
4862	  rtx y = XVECEXP (x, 0, i);
4863	  if (GET_CODE (y) == CLOBBER)
4864	    {
4865	      rtx clobbered = XEXP (y, 0);
4866
4867	      if (REG_P (clobbered)
4868		  || GET_CODE (clobbered) == SUBREG)
4869		invalidate (clobbered, VOIDmode);
4870	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4871		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4872		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4873	    }
4874	}
4875
4876      for (i = 0; i < lim; i++)
4877	{
4878	  rtx y = XVECEXP (x, 0, i);
4879	  if (GET_CODE (y) == SET)
4880	    {
4881	      /* As above, we ignore unconditional jumps and call-insns and
4882		 ignore the result of apply_change_group.  */
4883	      if (GET_CODE (SET_SRC (y)) == CALL)
4884		{
4885		  canon_reg (SET_SRC (y), insn);
4886		  apply_change_group ();
4887		  fold_rtx (SET_SRC (y), insn);
4888		  invalidate (SET_DEST (y), VOIDmode);
4889		}
4890	      else if (SET_DEST (y) == pc_rtx
4891		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4892		;
4893	      else
4894		sets[n_sets++].rtl = y;
4895	    }
4896	  else if (GET_CODE (y) == CLOBBER)
4897	    {
4898	      /* If we clobber memory, canon the address.
4899		 This does nothing when a register is clobbered
4900		 because we have already invalidated the reg.  */
4901	      if (MEM_P (XEXP (y, 0)))
4902		canon_reg (XEXP (y, 0), NULL_RTX);
4903	    }
4904	  else if (GET_CODE (y) == USE
4905		   && ! (REG_P (XEXP (y, 0))
4906			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4907	    canon_reg (y, NULL_RTX);
4908	  else if (GET_CODE (y) == CALL)
4909	    {
4910	      /* The result of apply_change_group can be ignored; see
4911		 canon_reg.  */
4912	      canon_reg (y, insn);
4913	      apply_change_group ();
4914	      fold_rtx (y, insn);
4915	    }
4916	}
4917    }
4918  else if (GET_CODE (x) == CLOBBER)
4919    {
4920      if (MEM_P (XEXP (x, 0)))
4921	canon_reg (XEXP (x, 0), NULL_RTX);
4922    }
4923
4924  /* Canonicalize a USE of a pseudo register or memory location.  */
4925  else if (GET_CODE (x) == USE
4926	   && ! (REG_P (XEXP (x, 0))
4927		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4928    canon_reg (XEXP (x, 0), NULL_RTX);
4929  else if (GET_CODE (x) == CALL)
4930    {
4931      /* The result of apply_change_group can be ignored; see canon_reg.  */
4932      canon_reg (x, insn);
4933      apply_change_group ();
4934      fold_rtx (x, insn);
4935    }
4936
4937  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4938     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
4939     is handled specially for this case, and if it isn't set, then there will
4940     be no equivalence for the destination.  */
4941  if (n_sets == 1 && REG_NOTES (insn) != 0
4942      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4943      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4944	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4945    {
4946      src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4947      XEXP (tem, 0) = src_eqv;
4948    }
4949
4950  /* Canonicalize sources and addresses of destinations.
4951     We do this in a separate pass to avoid problems when a MATCH_DUP is
4952     present in the insn pattern.  In that case, we want to ensure that
4953     we don't break the duplicate nature of the pattern.  So we will replace
4954     both operands at the same time.  Otherwise, we would fail to find an
4955     equivalent substitution in the loop calling validate_change below.
4956
4957     We used to suppress canonicalization of DEST if it appears in SRC,
4958     but we don't do this any more.  */
4959
4960  for (i = 0; i < n_sets; i++)
4961    {
4962      rtx dest = SET_DEST (sets[i].rtl);
4963      rtx src = SET_SRC (sets[i].rtl);
4964      rtx new = canon_reg (src, insn);
4965      int insn_code;
4966
4967      sets[i].orig_src = src;
4968      if ((REG_P (new) && REG_P (src)
4969	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4970	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4971	  || (insn_code = recog_memoized (insn)) < 0
4972	  || insn_data[insn_code].n_dups > 0)
4973	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4974      else
4975	SET_SRC (sets[i].rtl) = new;
4976
4977      if (GET_CODE (dest) == ZERO_EXTRACT)
4978	{
4979	  validate_change (insn, &XEXP (dest, 1),
4980			   canon_reg (XEXP (dest, 1), insn), 1);
4981	  validate_change (insn, &XEXP (dest, 2),
4982			   canon_reg (XEXP (dest, 2), insn), 1);
4983	}
4984
4985      while (GET_CODE (dest) == SUBREG
4986	     || GET_CODE (dest) == ZERO_EXTRACT
4987	     || GET_CODE (dest) == STRICT_LOW_PART)
4988	dest = XEXP (dest, 0);
4989
4990      if (MEM_P (dest))
4991	canon_reg (dest, insn);
4992    }
4993
4994  /* Now that we have done all the replacements, we can apply the change
4995     group and see if they all work.  Note that this will cause some
4996     canonicalizations that would have worked individually not to be applied
4997     because some other canonicalization didn't work, but this should not
4998     occur often.
4999
5000     The result of apply_change_group can be ignored; see canon_reg.  */
5001
5002  apply_change_group ();
5003
5004  /* Set sets[i].src_elt to the class each source belongs to.
5005     Detect assignments from or to volatile things
5006     and set set[i] to zero so they will be ignored
5007     in the rest of this function.
5008
5009     Nothing in this loop changes the hash table or the register chains.  */
5010
5011  for (i = 0; i < n_sets; i++)
5012    {
5013      rtx src, dest;
5014      rtx src_folded;
5015      struct table_elt *elt = 0, *p;
5016      enum machine_mode mode;
5017      rtx src_eqv_here;
5018      rtx src_const = 0;
5019      rtx src_related = 0;
5020      struct table_elt *src_const_elt = 0;
5021      int src_cost = MAX_COST;
5022      int src_eqv_cost = MAX_COST;
5023      int src_folded_cost = MAX_COST;
5024      int src_related_cost = MAX_COST;
5025      int src_elt_cost = MAX_COST;
5026      int src_regcost = MAX_COST;
5027      int src_eqv_regcost = MAX_COST;
5028      int src_folded_regcost = MAX_COST;
5029      int src_related_regcost = MAX_COST;
5030      int src_elt_regcost = MAX_COST;
5031      /* Set nonzero if we need to call force_const_mem on the
5032	 contents of src_folded before using it.  */
5033      int src_folded_force_flag = 0;
5034
5035      dest = SET_DEST (sets[i].rtl);
5036      src = SET_SRC (sets[i].rtl);
5037
5038      /* If SRC is a constant that has no machine mode,
5039	 hash it with the destination's machine mode.
5040	 This way we can keep different modes separate.  */
5041
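      /* For example, (const_int 5) has VOIDmode; stored into an SImode
	 register it is hashed as SImode, while the same value stored into
	 a DImode register hashes separately.  */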
5042      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5043      sets[i].mode = mode;
5044
5045      if (src_eqv)
5046	{
5047	  enum machine_mode eqvmode = mode;
5048	  if (GET_CODE (dest) == STRICT_LOW_PART)
5049	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5050	  do_not_record = 0;
5051	  hash_arg_in_memory = 0;
5052	  src_eqv_hash = HASH (src_eqv, eqvmode);
5053
5054	  /* Find the equivalence class for the equivalent expression.  */
5055
5056	  if (!do_not_record)
5057	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5058
5059	  src_eqv_volatile = do_not_record;
5060	  src_eqv_in_memory = hash_arg_in_memory;
5061	}
5062
5063      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5064	 value of the INNER register, not the destination.  So it is not
5065	 a valid substitution for the source.  But save it for later.  */
5066      if (GET_CODE (dest) == STRICT_LOW_PART)
5067	src_eqv_here = 0;
5068      else
5069	src_eqv_here = src_eqv;
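      /* For instance, with (set (strict_low_part (subreg:QI (reg:SI 100) 0))
	 ...) a REG_EQUAL note describes all of register 100, not just the
	 QImode piece being stored, so it cannot replace the source here.  */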
5070
5071      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
5072	 simplified result, which may not necessarily be valid.  */
5073      src_folded = fold_rtx (src, insn);
5074
5075#if 0
5076      /* ??? This caused bad code to be generated for the m68k port with -O2.
5077	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5078	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5079	 At the end we will add src and src_const to the same equivalence
5080	 class.  We now have 3 and -1 on the same equivalence class.  This
5081	 causes later instructions to be mis-optimized.  */
5082      /* If storing a constant in a bitfield, pre-truncate the constant
5083	 so we will be able to record it later.  */
5084      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5085	{
5086	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5087
5088	  if (GET_CODE (src) == CONST_INT
5089	      && GET_CODE (width) == CONST_INT
5090	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5091	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5092	    src_folded
5093	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5094					  << INTVAL (width)) - 1));
5095	}
5096#endif
5097
5098      /* Compute SRC's hash code, and also notice if it
5099	 should not be recorded at all.  In that case,
5100	 prevent any further processing of this assignment.  */
5101      do_not_record = 0;
5102      hash_arg_in_memory = 0;
5103
5104      sets[i].src = src;
5105      sets[i].src_hash = HASH (src, mode);
5106      sets[i].src_volatile = do_not_record;
5107      sets[i].src_in_memory = hash_arg_in_memory;
5108
5109      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5110	 a pseudo, do not record SRC.  Using SRC as a replacement for
5111	 anything else will be incorrect in that situation.  Note that
5112	 this usually occurs only for stack slots, in which case all the
5113	 RTL would be referring to SRC, so we don't lose any optimization
5114	 opportunities by not having SRC in the hash table.  */
5115
5116      if (MEM_P (src)
5117	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5118	  && REG_P (dest)
5119	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5120	sets[i].src_volatile = 1;
5121
5122#if 0
5123      /* It is no longer clear why we used to do this, but it doesn't
5124	 appear to still be needed.  So let's try without it since this
5125	 code hurts cse'ing widened ops.  */
5126      /* If source is a paradoxical subreg (such as QI treated as an SI),
5127	 treat it as volatile.  It may do the work of an SI in one context
5128	 where the extra bits are not being used, but cannot replace an SI
5129	 in general.  */
5130      if (GET_CODE (src) == SUBREG
5131	  && (GET_MODE_SIZE (GET_MODE (src))
5132	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5133	sets[i].src_volatile = 1;
5134#endif
5135
5136      /* Locate all possible equivalent forms for SRC.  Try to replace
5137         SRC in the insn with each cheaper equivalent.
5138
5139         We have the following types of equivalents: SRC itself, a folded
5140         version, a value given in a REG_EQUAL note, or a value related
5141	 to a constant.
5142
5143         Each of these equivalents may be part of an additional class
5144         of equivalents (if more than one is in the table, they must be in
5145         the same class; we check for this).
5146
5147	 If the source is volatile, we don't do any table lookups.
5148
5149         We note any constant equivalent for possible later use in a
5150         REG_NOTE.  */
5151
5152      if (!sets[i].src_volatile)
5153	elt = lookup (src, sets[i].src_hash, mode);
5154
5155      sets[i].src_elt = elt;
5156
5157      if (elt && src_eqv_here && src_eqv_elt)
5158	{
5159	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5160	    {
5161	      /* The REG_EQUAL is indicating that two formerly distinct
5162		 classes are now equivalent.  So merge them.  */
5163	      merge_equiv_classes (elt, src_eqv_elt);
5164	      src_eqv_hash = HASH (src_eqv, elt->mode);
5165	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5166	    }
5167
5168	  src_eqv_here = 0;
5169	}
5170
5171      else if (src_eqv_elt)
5172	elt = src_eqv_elt;
5173
5174      /* Try to find a constant somewhere and record it in `src_const'.
5175	 Record its table element, if any, in `src_const_elt'.  Look in
5176	 any known equivalences first.  (If the constant is not in the
5177	 table, also set `sets[i].src_const_hash').  */
5178      if (elt)
5179	for (p = elt->first_same_value; p; p = p->next_same_value)
5180	  if (p->is_const)
5181	    {
5182	      src_const = p->exp;
5183	      src_const_elt = elt;
5184	      break;
5185	    }
5186
5187      if (src_const == 0
5188	  && (CONSTANT_P (src_folded)
5189	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5190		 "constant" here so we will record it. This allows us
5191		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5192	      || (GET_CODE (src_folded) == MINUS
5193		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5194		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5195	src_const = src_folded, src_const_elt = elt;
5196      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5197	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5198
5199      /* If we don't know whether the constant is in the table, get its
5200	 hash code and look it up.  */
5201      if (src_const && src_const_elt == 0)
5202	{
5203	  sets[i].src_const_hash = HASH (src_const, mode);
5204	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5205	}
5206
5207      sets[i].src_const = src_const;
5208      sets[i].src_const_elt = src_const_elt;
5209
5210      /* If the constant and our source are both in the table, mark them as
5211	 equivalent.  Otherwise, if a constant is in the table but the source
5212	 isn't, set ELT to it.  */
5213      if (src_const_elt && elt
5214	  && src_const_elt->first_same_value != elt->first_same_value)
5215	merge_equiv_classes (elt, src_const_elt);
5216      else if (src_const_elt && elt == 0)
5217	elt = src_const_elt;
5218
5219      /* See if there is a register linearly related to a constant
5220         equivalent of SRC.  */
5221      if (src_const
5222	  && (GET_CODE (src_const) == CONST
5223	      || (src_const_elt && src_const_elt->related_value != 0)))
5224	{
5225	  src_related = use_related_value (src_const, src_const_elt);
5226	  if (src_related)
5227	    {
5228	      struct table_elt *src_related_elt
5229		= lookup (src_related, HASH (src_related, mode), mode);
5230	      if (src_related_elt && elt)
5231		{
5232		  if (elt->first_same_value
5233		      != src_related_elt->first_same_value)
5234		    /* This can occur when we previously saw a CONST
5235		       involving a SYMBOL_REF and then see the SYMBOL_REF
5236		       twice.  Merge the involved classes.  */
5237		    merge_equiv_classes (elt, src_related_elt);
5238
5239		  src_related = 0;
5240		  src_related_elt = 0;
5241		}
5242	      else if (src_related_elt && elt == 0)
5243		elt = src_related_elt;
5244	    }
5245	}
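      /* An illustrative sketch of the idea (see use_related_value): if
	 (reg:SI 7) is known to hold (const (plus (symbol_ref "x") (const_int 4)))
	 and SRC_CONST is (const (plus (symbol_ref "x") (const_int 8))),
	 reusing register 7 with a small adjustment may be cheaper than
	 rematerializing the whole constant.  */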
5246
5247      /* See if we have a CONST_INT that is already in a register in a
5248	 wider mode.  */
5249
5250      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5251	  && GET_MODE_CLASS (mode) == MODE_INT
5252	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5253	{
5254	  enum machine_mode wider_mode;
5255
5256	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5257	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5258	       && src_related == 0;
5259	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5260	    {
5261	      struct table_elt *const_elt
5262		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5263
5264	      if (const_elt == 0)
5265		continue;
5266
5267	      for (const_elt = const_elt->first_same_value;
5268		   const_elt; const_elt = const_elt->next_same_value)
5269		if (REG_P (const_elt->exp))
5270		  {
5271		    src_related = gen_lowpart (mode, const_elt->exp);
5273		    break;
5274		  }
5275	    }
5276	}
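      /* For example, if (reg:SI 100) is already known to hold (const_int 7),
	 the QImode constant 7 can be taken as the lowpart of register 100
	 instead of being loaded again.  */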
5277
5278      /* Another possibility is that we have an AND with a constant in
5279	 a mode narrower than a word.  If so, it might have been generated
5280	 as part of an "if" which would narrow the AND.  If we already
5281	 have done the AND in a wider mode, we can use a SUBREG of that
5282	 value.  */
5283
5284      if (flag_expensive_optimizations && ! src_related
5285	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5286	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5287	{
5288	  enum machine_mode tmode;
5289	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5290
5291	  for (tmode = GET_MODE_WIDER_MODE (mode);
5292	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5293	       tmode = GET_MODE_WIDER_MODE (tmode))
5294	    {
5295	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5296	      struct table_elt *larger_elt;
5297
5298	      if (inner)
5299		{
5300		  PUT_MODE (new_and, tmode);
5301		  XEXP (new_and, 0) = inner;
5302		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5303		  if (larger_elt == 0)
5304		    continue;
5305
5306		  for (larger_elt = larger_elt->first_same_value;
5307		       larger_elt; larger_elt = larger_elt->next_same_value)
5308		    if (REG_P (larger_elt->exp))
5309		      {
5310			src_related
5311			  = gen_lowpart (mode, larger_elt->exp);
5312			break;
5313		      }
5314
5315		  if (src_related)
5316		    break;
5317		}
5318	    }
5319	}
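      /* For example, needing (and:QI X (const_int 15)) when an SImode AND
	 of the widened X with 15 is already in a register lets us use the
	 QImode lowpart of that register.  */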
5320
5321#ifdef LOAD_EXTEND_OP
5322      /* See if a MEM has already been loaded with a widening operation;
5323	 if it has, we can use a subreg of that.  Many CISC machines
5324	 also have such operations, but this is only likely to be
5325	 beneficial on these machines.  */
5326
5327      if (flag_expensive_optimizations && src_related == 0
5328	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5329	  && GET_MODE_CLASS (mode) == MODE_INT
5330	  && MEM_P (src) && ! do_not_record
5331	  && LOAD_EXTEND_OP (mode) != UNKNOWN)
5332	{
5333	  struct rtx_def memory_extend_buf;
5334	  rtx memory_extend_rtx = &memory_extend_buf;
5335	  enum machine_mode tmode;
5336
5337	  /* Set what we are trying to extend and the operation it might
5338	     have been extended with.  */
5339	  memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
5340	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5341	  XEXP (memory_extend_rtx, 0) = src;
5342
5343	  for (tmode = GET_MODE_WIDER_MODE (mode);
5344	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5345	       tmode = GET_MODE_WIDER_MODE (tmode))
5346	    {
5347	      struct table_elt *larger_elt;
5348
5349	      PUT_MODE (memory_extend_rtx, tmode);
5350	      larger_elt = lookup (memory_extend_rtx,
5351				   HASH (memory_extend_rtx, tmode), tmode);
5352	      if (larger_elt == 0)
5353		continue;
5354
5355	      for (larger_elt = larger_elt->first_same_value;
5356		   larger_elt; larger_elt = larger_elt->next_same_value)
5357		if (REG_P (larger_elt->exp))
5358		  {
5359		    src_related = gen_lowpart (mode, larger_elt->exp);
5361		    break;
5362		  }
5363
5364	      if (src_related)
5365		break;
5366	    }
5367	}
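      /* For example, on a target whose QImode loads always zero extend,
	 a previously recorded (zero_extend:SI (mem:QI ...)) in a register
	 makes the QImode lowpart of that register equivalent to the MEM.  */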
5368#endif /* LOAD_EXTEND_OP */
5369
5370      if (src == src_folded)
5371	src_folded = 0;
5372
5373      /* At this point, ELT, if nonzero, points to a class of expressions
5374         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5375	 and SRC_RELATED, if nonzero, each contain additional equivalent
5376	 expressions.  Prune these latter expressions by deleting expressions
5377	 already in the equivalence class.
5378
5379	 Check for an equivalent identical to the destination.  If found,
5380	 this is the preferred equivalent since it will likely lead to
5381	 elimination of the insn.  Indicate this by placing it in
5382	 `src_related'.  */
5383
5384      if (elt)
5385	elt = elt->first_same_value;
5386      for (p = elt; p; p = p->next_same_value)
5387	{
5388	  enum rtx_code code = GET_CODE (p->exp);
5389
5390	  /* If the expression is not valid, ignore it.  Then we do not
5391	     have to check for validity below.  In most cases, we can use
5392	     `rtx_equal_p', since canonicalization has already been done.  */
5393	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5394	    continue;
5395
5396	  /* Also skip paradoxical subregs, unless that's what we're
5397	     looking for.  */
5398	  if (code == SUBREG
5399	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5400		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5401	      && ! (src != 0
5402		    && GET_CODE (src) == SUBREG
5403		    && GET_MODE (src) == GET_MODE (p->exp)
5404		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5405			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5406	    continue;
5407
5408	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5409	    src = 0;
5410	  else if (src_folded && GET_CODE (src_folded) == code
5411		   && rtx_equal_p (src_folded, p->exp))
5412	    src_folded = 0;
5413	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5414		   && rtx_equal_p (src_eqv_here, p->exp))
5415	    src_eqv_here = 0;
5416	  else if (src_related && GET_CODE (src_related) == code
5417		   && rtx_equal_p (src_related, p->exp))
5418	    src_related = 0;
5419
5420	  /* If this is the same as the destination of the insn, we want
5421	     to prefer it.  Copy it to src_related.  The code below will
5422	     then give it a negative cost.  */
5423	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5424	    src_related = dest;
5425	}
5426
5427      /* Find the cheapest valid equivalent, trying all the available
5428         possibilities.  Prefer items not in the hash table to ones
5429         that are when they are equal cost.  Note that we can never
5430         worsen an insn as the current contents will also succeed.
5431	 If we find an equivalent identical to the destination, use it as best,
5432	 since this insn will probably be eliminated in that case.  */
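      /* An equivalent identical to DEST gets cost -1 below, which beats
	 any real cost, so the insn can become a no-op move that later
	 passes delete.  */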
5433      if (src)
5434	{
5435	  if (rtx_equal_p (src, dest))
5436	    src_cost = src_regcost = -1;
5437	  else
5438	    {
5439	      src_cost = COST (src);
5440	      src_regcost = approx_reg_cost (src);
5441	    }
5442	}
5443
5444      if (src_eqv_here)
5445	{
5446	  if (rtx_equal_p (src_eqv_here, dest))
5447	    src_eqv_cost = src_eqv_regcost = -1;
5448	  else
5449	    {
5450	      src_eqv_cost = COST (src_eqv_here);
5451	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5452	    }
5453	}
5454
5455      if (src_folded)
5456	{
5457	  if (rtx_equal_p (src_folded, dest))
5458	    src_folded_cost = src_folded_regcost = -1;
5459	  else
5460	    {
5461	      src_folded_cost = COST (src_folded);
5462	      src_folded_regcost = approx_reg_cost (src_folded);
5463	    }
5464	}
5465
5466      if (src_related)
5467	{
5468	  if (rtx_equal_p (src_related, dest))
5469	    src_related_cost = src_related_regcost = -1;
5470	  else
5471	    {
5472	      src_related_cost = COST (src_related);
5473	      src_related_regcost = approx_reg_cost (src_related);
5474	    }
5475	}
5476
5477      /* If this was an indirect jump insn, a known label will really be
5478	 cheaper even though it looks more expensive.  */
5479      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5480	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5481
5482      /* Terminate loop when replacement made.  This must terminate since
5483         the current contents will be tested and will always be valid.  */
5484      while (1)
5485	{
5486	  rtx trial;
5487
5488	  /* Skip invalid entries.  */
5489	  while (elt && !REG_P (elt->exp)
5490		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5491	    elt = elt->next_same_value;
5492
5493	  /* A paradoxical subreg would be bad here: it'll be the right
5494	     size, but later may be adjusted so that the upper bits aren't
5495	     what we want.  So reject it.  */
5496	  if (elt != 0
5497	      && GET_CODE (elt->exp) == SUBREG
5498	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5499		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5500	      /* It is okay, though, if the rtx we're trying to match
5501		 will ignore any of the bits we can't predict.  */
5502	      && ! (src != 0
5503		    && GET_CODE (src) == SUBREG
5504		    && GET_MODE (src) == GET_MODE (elt->exp)
5505		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5506			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5507	    {
5508	      elt = elt->next_same_value;
5509	      continue;
5510	    }
5511
5512	  if (elt)
5513	    {
5514	      src_elt_cost = elt->cost;
5515	      src_elt_regcost = elt->regcost;
5516	    }
5517
5518	  /* Find cheapest and skip it for the next time.   For items
5519	     of equal cost, use this order:
5520	     src_folded, src, src_eqv, src_related and hash table entry.  */
5521	  if (src_folded
5522	      && preferable (src_folded_cost, src_folded_regcost,
5523			     src_cost, src_regcost) <= 0
5524	      && preferable (src_folded_cost, src_folded_regcost,
5525			     src_eqv_cost, src_eqv_regcost) <= 0
5526	      && preferable (src_folded_cost, src_folded_regcost,
5527			     src_related_cost, src_related_regcost) <= 0
5528	      && preferable (src_folded_cost, src_folded_regcost,
5529			     src_elt_cost, src_elt_regcost) <= 0)
5530	    {
5531	      trial = src_folded, src_folded_cost = MAX_COST;
5532	      if (src_folded_force_flag)
5533		{
5534		  rtx forced = force_const_mem (mode, trial);
5535		  if (forced)
5536		    trial = forced;
5537		}
5538	    }
5539	  else if (src
5540		   && preferable (src_cost, src_regcost,
5541				  src_eqv_cost, src_eqv_regcost) <= 0
5542		   && preferable (src_cost, src_regcost,
5543				  src_related_cost, src_related_regcost) <= 0
5544		   && preferable (src_cost, src_regcost,
5545				  src_elt_cost, src_elt_regcost) <= 0)
5546	    trial = src, src_cost = MAX_COST;
5547	  else if (src_eqv_here
5548		   && preferable (src_eqv_cost, src_eqv_regcost,
5549				  src_related_cost, src_related_regcost) <= 0
5550		   && preferable (src_eqv_cost, src_eqv_regcost,
5551				  src_elt_cost, src_elt_regcost) <= 0)
5552	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5553	  else if (src_related
5554		   && preferable (src_related_cost, src_related_regcost,
5555				  src_elt_cost, src_elt_regcost) <= 0)
5556	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5557	  else
5558	    {
5559	      trial = copy_rtx (elt->exp);
5560	      elt = elt->next_same_value;
5561	      src_elt_cost = MAX_COST;
5562	    }
5563
5564	  /* We don't normally have an insn matching (set (pc) (pc)), so
5565	     check for this separately here.  We will delete such an
5566	     insn below.
5567
5568	     For other cases such as a table jump or conditional jump
5569	     where we know the ultimate target, go ahead and replace the
5570	     operand.  While that may not make a valid insn, we will
5571	     reemit the jump below (and also insert any necessary
5572	     barriers).  */
5573	  if (n_sets == 1 && dest == pc_rtx
5574	      && (trial == pc_rtx
5575		  || (GET_CODE (trial) == LABEL_REF
5576		      && ! condjump_p (insn))))
5577	    {
5578	      /* Don't substitute non-local labels, this confuses CFG.  */
5579	      if (GET_CODE (trial) == LABEL_REF
5580		  && LABEL_REF_NONLOCAL_P (trial))
5581		continue;
5582
5583	      SET_SRC (sets[i].rtl) = trial;
5584	      cse_jumps_altered = 1;
5585	      break;
5586	    }
5587
5588	  /* Reject certain invalid forms of CONST that we create.  */
5589	  else if (CONSTANT_P (trial)
5590		   && GET_CODE (trial) == CONST
5591		   /* Reject cases that will cause decode_rtx_const to
5592		      die.  On the alpha when simplifying a switch, we
5593		      get (const (truncate (minus (label_ref)
5594		      (label_ref)))).  */
5595		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5596		       /* Likewise on IA-64, except without the
5597			  truncate.  */
5598		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5599			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5600			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5601	    /* Do nothing for this case.  */
5602	    ;
5603
5604	  /* Look for a substitution that makes a valid insn.  */
5605	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5606	    {
5607	      rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5608
5609	      /* If we just made a substitution inside a libcall, then we
5610		 need to make the same substitution in any notes attached
5611		 to the RETVAL insn.  */
5612	      if (libcall_insn
5613		  && (REG_P (sets[i].orig_src)
5614		      || GET_CODE (sets[i].orig_src) == SUBREG
5615		      || MEM_P (sets[i].orig_src)))
5616		{
5617	          rtx note = find_reg_equal_equiv_note (libcall_insn);
5618		  if (note != 0)
5619		    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5620							   sets[i].orig_src,
5621							   copy_rtx (new));
5622		}
5623
5624	      /* The result of apply_change_group can be ignored; see
5625		 canon_reg.  */
5626
5627	      validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5628	      apply_change_group ();
5629	      break;
5630	    }
5631
5632	  /* If we previously found constant pool entries for
5633	     constants and this is a constant, try making a
5634	     pool entry.  Put it in src_folded unless we have already done
5635	     this, since that is where it likely came from.  */
5636
5637	  else if (constant_pool_entries_cost
5638		   && CONSTANT_P (trial)
5639		   && (src_folded == 0
5640		       || (!MEM_P (src_folded)
5641			   && ! src_folded_force_flag))
5642		   && GET_MODE_CLASS (mode) != MODE_CC
5643		   && mode != VOIDmode)
5644	    {
5645	      src_folded_force_flag = 1;
5646	      src_folded = trial;
5647	      src_folded_cost = constant_pool_entries_cost;
5648	      src_folded_regcost = constant_pool_entries_regcost;
5649	    }
5650	}
5651
5652      src = SET_SRC (sets[i].rtl);
5653
5654      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5655	 However, there is an important exception:  If both are registers
5656	 that are not the head of their equivalence class, replace SET_SRC
5657	 with the head of the class.  If we do not do this, we will have
5658	 both registers live over a portion of the basic block.  This way,
5659	 their lifetimes will likely abut instead of overlapping.  */
5660      if (REG_P (dest)
5661	  && REGNO_QTY_VALID_P (REGNO (dest)))
5662	{
5663	  int dest_q = REG_QTY (REGNO (dest));
5664	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5665
5666	  if (dest_ent->mode == GET_MODE (dest)
5667	      && dest_ent->first_reg != REGNO (dest)
5668	      && REG_P (src) && REGNO (src) == REGNO (dest)
5669	      /* Don't do this if the original insn had a hard reg as
5670		 SET_SRC or SET_DEST.  */
5671	      && (!REG_P (sets[i].src)
5672		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5673	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5674	    /* We can't call canon_reg here because it won't do anything if
5675	       SRC is a hard register.  */
5676	    {
5677	      int src_q = REG_QTY (REGNO (src));
5678	      struct qty_table_elem *src_ent = &qty_table[src_q];
5679	      int first = src_ent->first_reg;
5680	      rtx new_src
5681		= (first >= FIRST_PSEUDO_REGISTER
5682		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5683
5684	      /* We must use validate_change even for this, because this
5685		 might be a special no-op instruction, suitable only to
5686		 tag notes onto.  */
5687	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5688		{
5689		  src = new_src;
5690		  /* If we had a constant that is cheaper than what we are now
5691		     setting SRC to, use that constant.  We ignored it when we
5692		     thought we could make this into a no-op.  */
5693		  if (src_const && COST (src_const) < COST (src)
5694		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5695					  src_const, 0))
5696		    src = src_const;
5697		}
5698	    }
5699	}
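      /* For example, if registers 100 and 101 are equivalent with 100 the
	 head of the class, the no-op (set (reg 101) (reg 101)) becomes
	 (set (reg 101) (reg 100)), so the two lifetimes abut instead of
	 overlapping.  */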
5700
5701      /* If we made a change, recompute SRC values.  */
5702      if (src != sets[i].src)
5703	{
5704	  cse_altered = 1;
5705	  do_not_record = 0;
5706	  hash_arg_in_memory = 0;
5707	  sets[i].src = src;
5708	  sets[i].src_hash = HASH (src, mode);
5709	  sets[i].src_volatile = do_not_record;
5710	  sets[i].src_in_memory = hash_arg_in_memory;
5711	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5712	}
5713
5714      /* If this is a single SET, we are setting a register, and we have an
5715	 equivalent constant, we want to add a REG_NOTE.   We don't want
5716	 to write a REG_EQUAL note for a constant pseudo since verifying that
5717	 that pseudo hasn't been eliminated is a pain.  Such a note also
5718	 won't help anything.
5719
5720	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5721	 which can be created for a reference to a compile time computable
5722	 entry in a jump table.  */
5723
5724      if (n_sets == 1 && src_const && REG_P (dest)
5725	  && !REG_P (src_const)
5726	  && ! (GET_CODE (src_const) == CONST
5727		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5728		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5729		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5730	{
5731	  /* We only want a REG_EQUAL note if src_const != src.  */
5732	  if (! rtx_equal_p (src, src_const))
5733	    {
5734	      /* Make sure that the rtx is not shared.  */
5735	      src_const = copy_rtx (src_const);
5736
5737	      /* Record the actual constant value in a REG_EQUAL note,
5738		 making a new one if one does not already exist.  */
5739	      set_unique_reg_note (insn, REG_EQUAL, src_const);
5740	    }
5741	}
5742
5743      /* Now deal with the destination.  */
5744      do_not_record = 0;
5745
5746      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5747      while (GET_CODE (dest) == SUBREG
5748	     || GET_CODE (dest) == ZERO_EXTRACT
5749	     || GET_CODE (dest) == STRICT_LOW_PART)
5750	dest = XEXP (dest, 0);
5751
5752      sets[i].inner_dest = dest;
5753
5754      if (MEM_P (dest))
5755	{
5756#ifdef PUSH_ROUNDING
5757	  /* Stack pushes invalidate the stack pointer.  */
5758	  rtx addr = XEXP (dest, 0);
5759	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5760	      && XEXP (addr, 0) == stack_pointer_rtx)
5761	    invalidate (stack_pointer_rtx, Pmode);
5762#endif
5763	  dest = fold_rtx (dest, insn);
5764	}
5765
5766      /* Compute the hash code of the destination now,
5767	 before the effects of this instruction are recorded,
5768	 since the register values used in the address computation
5769	 are those before this instruction.  */
5770      sets[i].dest_hash = HASH (dest, mode);
5771
5772      /* Don't enter a bit-field in the hash table
5773	 because the value in it after the store
5774	 may not equal what was stored, due to truncation.  */
5775
5776      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5777	{
5778	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5779
5780	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5781	      && GET_CODE (width) == CONST_INT
5782	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5783	      && ! (INTVAL (src_const)
5784		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5785	    /* Exception: if the value is constant,
5786	       and it won't be truncated, record it.  */
5787	    ;
5788	  else
5789	    {
5790	      /* This is chosen so that the destination will be invalidated
5791		 but no new value will be recorded.
5792		 We must invalidate because sometimes constant
5793		 values can be recorded for bitfields.  */
5794	      sets[i].src_elt = 0;
5795	      sets[i].src_volatile = 1;
5796	      src_eqv = 0;
5797	      src_eqv_elt = 0;
5798	    }
5799	}
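      /* For example, storing (const_int 3) in a 2-bit field is recorded,
	 since 3 & ~3 is 0 and no bits are lost; (const_int 5) would be
	 truncated to 1, so the destination is invalidated instead.  */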
5800
5801      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5802	 the insn.  */
5803      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5804	{
5805	  /* One less use of the label this insn used to jump to.  */
5806	  delete_insn (insn);
5807	  cse_jumps_altered = 1;
5808	  /* No more processing for this set.  */
5809	  sets[i].rtl = 0;
5810	}
5811
5812      /* If this SET is now setting PC to a label, we know it used to
5813	 be a conditional or computed branch.  */
5814      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5815	       && !LABEL_REF_NONLOCAL_P (src))
5816	{
5817	  /* Now emit a BARRIER after the unconditional jump.  */
5818	  if (NEXT_INSN (insn) == 0
5819	      || !BARRIER_P (NEXT_INSN (insn)))
5820	    emit_barrier_after (insn);
5821
5822	  /* We reemit the jump in as many cases as possible just in
5823	     case the form of an unconditional jump is significantly
5824	     different from a computed jump or conditional jump.
5825
5826	     If this insn has multiple sets, then reemitting the
5827	     jump is nontrivial.  So instead we just force rerecognition
5828	     and hope for the best.  */
5829	  if (n_sets == 1)
5830	    {
5831	      rtx new, note;
5832
5833	      new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5834	      JUMP_LABEL (new) = XEXP (src, 0);
5835	      LABEL_NUSES (XEXP (src, 0))++;
5836
5837	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5838	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5839	      if (note)
5840		{
5841		  XEXP (note, 1) = NULL_RTX;
5842		  REG_NOTES (new) = note;
5843		}
5844
5845	      delete_insn (insn);
5846	      insn = new;
5847
5848	      /* Now emit a BARRIER after the unconditional jump.  */
5849	      if (NEXT_INSN (insn) == 0
5850		  || !BARRIER_P (NEXT_INSN (insn)))
5851		emit_barrier_after (insn);
5852	    }
5853	  else
5854	    INSN_CODE (insn) = -1;
5855
5856	  /* Do not bother deleting any unreachable code,
5857	     let jump/flow do that.  */
5858
5859	  cse_jumps_altered = 1;
5860	  sets[i].rtl = 0;
5861	}
5862
5863      /* If destination is volatile, invalidate it and then do no further
5864	 processing for this assignment.  */
5865
5866      else if (do_not_record)
5867	{
5868	  if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5869	    invalidate (dest, VOIDmode);
5870	  else if (MEM_P (dest))
5871	    invalidate (dest, VOIDmode);
5872	  else if (GET_CODE (dest) == STRICT_LOW_PART
5873		   || GET_CODE (dest) == ZERO_EXTRACT)
5874	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5875	  sets[i].rtl = 0;
5876	}
5877
5878      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5879	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5880
5881#ifdef HAVE_cc0
5882      /* If setting CC0, record what it was set to, or a constant, if it
5883	 is equivalent to a constant.  If it is being set to a floating-point
5884	 value, make a COMPARE with the appropriate constant of 0.  If we
5885	 don't do this, later code can interpret this as a test against
5886	 const0_rtx, which can cause problems if we try to put it into an
5887	 insn as a floating-point operand.  */
5888      if (dest == cc0_rtx)
5889	{
5890	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5891	  this_insn_cc0_mode = mode;
5892	  if (FLOAT_MODE_P (mode))
5893	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5894					     CONST0_RTX (mode));
5895	}
5896#endif
5897    }
5898
5899  /* Now enter all non-volatile source expressions in the hash table
5900     if they are not already present.
5901     Record their equivalence classes in src_elt.
5902     This way we can insert the corresponding destinations into
5903     the same classes even if the actual sources are no longer in them
5904     (having been invalidated).  */
5905
5906  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5907      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5908    {
5909      struct table_elt *elt;
5910      struct table_elt *classp = sets[0].src_elt;
5911      rtx dest = SET_DEST (sets[0].rtl);
5912      enum machine_mode eqvmode = GET_MODE (dest);
5913
5914      if (GET_CODE (dest) == STRICT_LOW_PART)
5915	{
5916	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5917	  classp = 0;
5918	}
5919      if (insert_regs (src_eqv, classp, 0))
5920	{
5921	  rehash_using_reg (src_eqv);
5922	  src_eqv_hash = HASH (src_eqv, eqvmode);
5923	}
5924      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5925      elt->in_memory = src_eqv_in_memory;
5926      src_eqv_elt = elt;
5927
5928      /* Check to see if src_eqv_elt is the same as a set source which
5929	 does not yet have an elt, and if so set the elt of the set source
5930	 to src_eqv_elt.  */
5931      for (i = 0; i < n_sets; i++)
5932	if (sets[i].rtl && sets[i].src_elt == 0
5933	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5934	  sets[i].src_elt = src_eqv_elt;
5935    }
5936
5937  for (i = 0; i < n_sets; i++)
5938    if (sets[i].rtl && ! sets[i].src_volatile
5939	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5940      {
5941	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5942	  {
5943	    /* REG_EQUAL in setting a STRICT_LOW_PART
5944	       gives an equivalent for the entire destination register,
5945	       not just for the subreg being stored in now.
5946	       This is a more interesting equivalence, so we arrange later
5947	       to treat the entire reg as the destination.  */
5948	    sets[i].src_elt = src_eqv_elt;
5949	    sets[i].src_hash = src_eqv_hash;
5950	  }
5951	else
5952	  {
5953	    /* Insert source and constant equivalent into hash table, if not
5954	       already present.  */
5955	    struct table_elt *classp = src_eqv_elt;
5956	    rtx src = sets[i].src;
5957	    rtx dest = SET_DEST (sets[i].rtl);
5958	    enum machine_mode mode
5959	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5960
5961	    /* It's possible that we have a source value known to be
5962	       constant but don't have a REG_EQUAL note on the insn.
5963	       Lack of a note will mean src_eqv_elt will be NULL.  This
5964	       can happen where we've generated a SUBREG to access a
5965	       CONST_INT that is already in a register in a wider mode.
5966	       Ensure that the source expression is put in the proper
5967	       constant class.  */
5968	    if (!classp)
5969	      classp = sets[i].src_const_elt;
5970
5971	    if (sets[i].src_elt == 0)
5972	      {
5973		/* Don't put a hard register source into the table if this is
5974		   the last insn of a libcall.  In this case, we only need
5975		   to put src_eqv_elt in src_elt.  */
5976		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5977		  {
5978		    struct table_elt *elt;
5979
5980		    /* Note that these insert_regs calls cannot remove
5981		       any of the src_elt's, because they would have failed to
5982		       match if not still valid.  */
5983		    if (insert_regs (src, classp, 0))
5984		      {
5985			rehash_using_reg (src);
5986			sets[i].src_hash = HASH (src, mode);
5987		      }
5988		    elt = insert (src, classp, sets[i].src_hash, mode);
5989		    elt->in_memory = sets[i].src_in_memory;
5990		    sets[i].src_elt = classp = elt;
5991		  }
5992		else
5993		  sets[i].src_elt = classp;
5994	      }
5995	    if (sets[i].src_const && sets[i].src_const_elt == 0
5996		&& src != sets[i].src_const
5997		&& ! rtx_equal_p (sets[i].src_const, src))
5998	      sets[i].src_elt = insert (sets[i].src_const, classp,
5999					sets[i].src_const_hash, mode);
6000	  }
6001      }
6002    else if (sets[i].src_elt == 0)
6003      /* If we did not insert the source into the hash table (e.g., it was
6004	 volatile), note the equivalence class for the REG_EQUAL value, if any,
6005	 so that the destination goes into that class.  */
6006      sets[i].src_elt = src_eqv_elt;
6007
6008  /* Record destination addresses in the hash table.  This allows us to
6009     check if they are invalidated by other sets.  */
6010  for (i = 0; i < n_sets; i++)
6011    {
6012      if (sets[i].rtl)
6013	{
6014	  rtx x = sets[i].inner_dest;
6015	  struct table_elt *elt;
6016	  enum machine_mode mode;
6017	  unsigned hash;
6018
6019	  if (MEM_P (x))
6020	    {
6021	      x = XEXP (x, 0);
6022	      mode = GET_MODE (x);
6023	      hash = HASH (x, mode);
6024	      elt = lookup (x, hash, mode);
6025	      if (!elt)
6026		{
6027		  if (insert_regs (x, NULL, 0))
6028		    {
6029		      rtx dest = SET_DEST (sets[i].rtl);
6030
6031		      rehash_using_reg (x);
6032		      hash = HASH (x, mode);
6033		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6034		    }
6035		  elt = insert (x, NULL, hash, mode);
6036		}
6037
6038	      sets[i].dest_addr_elt = elt;
6039	    }
6040	  else
6041	    sets[i].dest_addr_elt = NULL;
6042	}
6043    }
6044
6045  invalidate_from_clobbers (x);
6046
6047  /* Some registers are invalidated by subroutine calls.  Memory is
6048     invalidated by non-constant calls.  */
6049
6050  if (CALL_P (insn))
6051    {
6052      if (! CONST_OR_PURE_CALL_P (insn))
6053	invalidate_memory ();
6054      invalidate_for_call ();
6055    }
6056
6057  /* Now invalidate everything set by this instruction.
6058     If a SUBREG or other funny destination is being set,
6059     sets[i].rtl is still nonzero, so here we invalidate the register
6060     of which a part is being set.  */
6061
6062  for (i = 0; i < n_sets; i++)
6063    if (sets[i].rtl)
6064      {
6065	/* We can't use the inner dest, because the mode associated with
6066	   a ZERO_EXTRACT is significant.  */
6067	rtx dest = SET_DEST (sets[i].rtl);
6068
6069	/* Needed for registers to remove the register from its
6070	   previous quantity's chain.
6071	   Needed for memory if this is a nonvarying address, unless
6072	   we have just done an invalidate_memory that covers even those.  */
6073	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6074	  invalidate (dest, VOIDmode);
6075	else if (MEM_P (dest))
6076	  invalidate (dest, VOIDmode);
6077	else if (GET_CODE (dest) == STRICT_LOW_PART
6078		 || GET_CODE (dest) == ZERO_EXTRACT)
6079	  invalidate (XEXP (dest, 0), GET_MODE (dest));
6080      }
6081
6082  /* A volatile ASM invalidates everything.  */
6083  if (NONJUMP_INSN_P (insn)
6084      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6085      && MEM_VOLATILE_P (PATTERN (insn)))
6086    flush_hash_table ();
6087
6088  /* Make sure registers mentioned in destinations
6089     are safe for use in an expression to be inserted.
6090     This removes from the hash table
6091     any invalid entry that refers to one of these registers.
6092
6093     We don't care about the return value from mention_regs because
6094     we are going to hash the SET_DEST values unconditionally.  */
6095
6096  for (i = 0; i < n_sets; i++)
6097    {
6098      if (sets[i].rtl)
6099	{
6100	  rtx x = SET_DEST (sets[i].rtl);
6101
6102	  if (!REG_P (x))
6103	    mention_regs (x);
6104	  else
6105	    {
6106	      /* We used to rely on all references to a register becoming
6107		 inaccessible when a register changes to a new quantity,
6108		 since that changes the hash code.  However, that is not
6109		 safe, since after HASH_SIZE new quantities we get a
6110		 hash 'collision' of a register with its own invalid
6111		 entries.  And since SUBREGs have been changed not to
6112		 change their hash code with the hash code of the register,
6113		 it wouldn't work any longer at all.  So we have to check
6114		 for any invalid references lying around now.
6115		 This code is similar to the REG case in mention_regs,
6116		 but it knows that reg_tick has been incremented, and
6117		 it leaves reg_in_table as -1.  */
6118	      unsigned int regno = REGNO (x);
6119	      unsigned int endregno
6120		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6121			   : hard_regno_nregs[regno][GET_MODE (x)]);
6122	      unsigned int i;
6123
6124	      for (i = regno; i < endregno; i++)
6125		{
6126		  if (REG_IN_TABLE (i) >= 0)
6127		    {
6128		      remove_invalid_refs (i);
6129		      REG_IN_TABLE (i) = -1;
6130		    }
6131		}
6132	    }
6133	}
6134    }
6135
6136  /* We may have just removed some of the src_elt's from the hash table.
6137     So replace each one with the current head of the same class.
6138     Also check if destination addresses have been removed.  */
6139
6140  for (i = 0; i < n_sets; i++)
6141    if (sets[i].rtl)
6142      {
6143	if (sets[i].dest_addr_elt
6144	    && sets[i].dest_addr_elt->first_same_value == 0)
6145	  {
6146	    /* The elt was removed, which means this destination is not
6147	       valid after this instruction.  */
6148	    sets[i].rtl = NULL_RTX;
6149	  }
6150	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6151	  /* If elt was removed, find current head of same class,
6152	     or 0 if nothing remains of that class.  */
6153	  {
6154	    struct table_elt *elt = sets[i].src_elt;
6155
6156	    while (elt && elt->prev_same_value)
6157	      elt = elt->prev_same_value;
6158
6159	    while (elt && elt->first_same_value == 0)
6160	      elt = elt->next_same_value;
6161	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6162	  }
6163      }
6164
6165  /* Now insert the destinations into their equivalence classes.  */
6166
6167  for (i = 0; i < n_sets; i++)
6168    if (sets[i].rtl)
6169      {
6170	rtx dest = SET_DEST (sets[i].rtl);
6171	struct table_elt *elt;
6172
6173	/* Don't record value if we are not supposed to risk allocating
6174	   floating-point values in registers that might be wider than
6175	   memory.  */
6176	if ((flag_float_store
6177	     && MEM_P (dest)
6178	     && FLOAT_MODE_P (GET_MODE (dest)))
6179	    /* Don't record BLKmode values, because we don't know the
6180	       size of it, and can't be sure that other BLKmode values
6181	       have the same or smaller size.  */
6182	    || GET_MODE (dest) == BLKmode
6183	    /* Don't record values of destinations set inside a libcall block
6184	       since we might delete the libcall.  Things should have been set
6185	       up so we won't want to reuse such a value, but we play it safe
6186	       here.  */
6187	    || libcall_insn
6188	    /* If we didn't put a REG_EQUAL value or a source into the hash
6189	       table, there is no point in recording DEST.  */
6190	    || sets[i].src_elt == 0
6191	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6192	       or SIGN_EXTEND, don't record DEST since it can cause
6193	       some tracking to be wrong.
6194
6195	       ??? Think about this more later.  */
6196	    || (GET_CODE (dest) == SUBREG
6197		&& (GET_MODE_SIZE (GET_MODE (dest))
6198		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6199		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6200		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6201	  continue;
6202
6203	/* STRICT_LOW_PART isn't part of the value BEING set,
6204	   and neither is the SUBREG inside it.
6205	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6206	if (GET_CODE (dest) == STRICT_LOW_PART)
6207	  dest = SUBREG_REG (XEXP (dest, 0));
6208
6209	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6210	  /* Registers must also be inserted into chains for quantities.  */
6211	  if (insert_regs (dest, sets[i].src_elt, 1))
6212	    {
6213	      /* If `insert_regs' changes something, the hash code must be
6214		 recalculated.  */
6215	      rehash_using_reg (dest);
6216	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6217	    }
6218
6219	elt = insert (dest, sets[i].src_elt,
6220		      sets[i].dest_hash, GET_MODE (dest));
6221
6222	elt->in_memory = (MEM_P (sets[i].inner_dest)
6223			  && !MEM_READONLY_P (sets[i].inner_dest));
6224
6225	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6226	   narrower than M2, and both M1 and M2 are the same number of words,
6227	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6228	   make that equivalence as well.
6229
6230	   However, BAR may have equivalences for which gen_lowpart
6231	   will produce a simpler value than gen_lowpart applied to
6232	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6233	   BAR's equivalences.  If we don't get a simplified form, make
6234	   the SUBREG.  It will not be used in an equivalence, but will
6235	   cause two similar assignments to be detected.
6236
6237	   Note the loop below will find SUBREG_REG (DEST) since we have
6238	   already entered SRC and DEST of the SET in the table.  */
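	/* For example, given (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)),
	   NEW_MODE is HImode; walking the class of register 101 we reach the
	   destination subreg itself, whose HImode lowpart simplifies to
	   (reg:HI 100), so register 100 joins the class of
	   (subreg:HI (reg:SI 101) 0).  */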
6239
6240	if (GET_CODE (dest) == SUBREG
6241	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6242		 / UNITS_PER_WORD)
6243		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6244	    && (GET_MODE_SIZE (GET_MODE (dest))
6245		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6246	    && sets[i].src_elt != 0)
6247	  {
6248	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6249	    struct table_elt *elt, *classp = 0;
6250
6251	    for (elt = sets[i].src_elt->first_same_value; elt;
6252		 elt = elt->next_same_value)
6253	      {
6254		rtx new_src = 0;
6255		unsigned src_hash;
6256		struct table_elt *src_elt;
6257		int byte = 0;
6258
6259		/* Ignore invalid entries.  */
6260		if (!REG_P (elt->exp)
6261		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6262		  continue;
6263
6264		/* We may have already been playing subreg games.  If the
6265		   mode is already correct for the destination, use it.  */
6266		if (GET_MODE (elt->exp) == new_mode)
6267		  new_src = elt->exp;
6268		else
6269		  {
6270		    /* Calculate big endian correction for the SUBREG_BYTE.
6271		       We have already checked that M1 (GET_MODE (dest))
6272		       is not narrower than M2 (new_mode).  */
6273		    if (BYTES_BIG_ENDIAN)
6274		      byte = (GET_MODE_SIZE (GET_MODE (dest))
6275			      - GET_MODE_SIZE (new_mode));
6276
6277		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6278					           GET_MODE (dest), byte);
6279		  }
6280
6281		/* The call to simplify_gen_subreg fails if the value
6282		   is VOIDmode, yet we can't do any simplification, e.g.
6283		   for EXPR_LISTs denoting function call results.
6284		   It is invalid to construct a SUBREG with a VOIDmode
6285		   SUBREG_REG, hence a zero new_src means we can't do
6286		   this substitution.  */
6287		if (! new_src)
6288		  continue;
6289
6290		src_hash = HASH (new_src, new_mode);
6291		src_elt = lookup (new_src, src_hash, new_mode);
6292
6293		/* Put the new source in the hash table if it isn't
6294		   there already.  */
6295		if (src_elt == 0)
6296		  {
6297		    if (insert_regs (new_src, classp, 0))
6298		      {
6299			rehash_using_reg (new_src);
6300			src_hash = HASH (new_src, new_mode);
6301		      }
6302		    src_elt = insert (new_src, classp, src_hash, new_mode);
6303		    src_elt->in_memory = elt->in_memory;
6304		  }
6305		else if (classp && classp != src_elt->first_same_value)
6306		  /* Show that two things that we've seen before are
6307		     actually the same.  */
6308		  merge_equiv_classes (src_elt, classp);
6309
6310		classp = src_elt->first_same_value;
6311		/* Ignore invalid entries.  */
6312		while (classp
6313		       && !REG_P (classp->exp)
6314		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6315		  classp = classp->next_same_value;
6316	      }
6317	  }
6318      }
6319
6320  /* Special handling for (set REG0 REG1) where REG0 is the
6321     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6322     be used in the sequel, so (if easily done) change this insn to
6323     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6324     that computed their value.  Then REG1 will become a dead store
6325     and won't cloud the situation for later optimizations.
6326
6327     Do not make this change if REG1 is a hard register, because it will
6328     then be used in the sequel and we may be changing a two-operand insn
6329     into a three-operand insn.
6330
6331     Also do not do this if we are operating on a copy of INSN.
6332
6333     Also don't do this if INSN ends a libcall; this would cause an unrelated
6334     register to be set in the middle of a libcall, and we then get bad code
6335     if the libcall is deleted.  */
6336
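  /* For example, if the previous insn computed (set (reg 101) ...) and
     INSN is (set (reg 100) (reg 101)) with register 100 the head of the
     class, we change the previous insn to set register 100 and INSN to
     (set (reg 101) (reg 100)), leaving register 101 a likely dead store.  */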
6337  if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6338      && NEXT_INSN (PREV_INSN (insn)) == insn
6339      && REG_P (SET_SRC (sets[0].rtl))
6340      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6341      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6342    {
6343      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6344      struct qty_table_elem *src_ent = &qty_table[src_q];
6345
6346      if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6347	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6348	{
6349	  rtx prev = insn;
6350	  /* Scan for the previous nonnote insn, but stop at a basic
6351	     block boundary.  */
6352	  do
6353	    {
6354	      prev = PREV_INSN (prev);
6355	    }
6356	  while (prev && NOTE_P (prev)
6357		 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6358
6359	  /* Do not swap the registers around if the previous instruction
6360	     attaches a REG_EQUIV note to REG1.
6361
6362	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6363	     from the pseudo that originally shadowed an incoming argument
6364	     to another register.  Some uses of REG_EQUIV might rely on it
6365	     being attached to REG1 rather than REG2.
6366
6367	     This section previously turned the REG_EQUIV into a REG_EQUAL
6368	     note.  We cannot do that because REG_EQUIV may provide an
6369	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
6370
6371	  if (prev != 0 && NONJUMP_INSN_P (prev)
6372	      && GET_CODE (PATTERN (prev)) == SET
6373	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6374	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6375	    {
6376	      rtx dest = SET_DEST (sets[0].rtl);
6377	      rtx src = SET_SRC (sets[0].rtl);
6378	      rtx note;
6379
6380	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6381	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6382	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6383	      apply_change_group ();
6384
6385	      /* If INSN has a REG_EQUAL note, and this note mentions
6386		 REG0, then we must delete it, because the value in
6387		 REG0 has changed.  If the note's value is REG1, we must
6388		 also delete it because that is now this insn's dest.  */
6389	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6390	      if (note != 0
6391		  && (reg_mentioned_p (dest, XEXP (note, 0))
6392		      || rtx_equal_p (src, XEXP (note, 0))))
6393		remove_note (insn, note);
6394	    }
6395	}
6396    }
6397
6398  /* If this is a conditional jump insn, record any known equivalences due to
6399     the condition being tested.  */
6400
6401  if (JUMP_P (insn)
6402      && n_sets == 1 && GET_CODE (x) == SET
6403      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6404    record_jump_equiv (insn, 0);
6405
6406#ifdef HAVE_cc0
6407  /* If the previous insn set CC0 and this insn no longer references CC0,
6408     delete the previous insn.  Here we use the fact that nothing expects CC0
6409     to be valid over an insn, which is true until the final pass.  */
6410  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6411      && (tem = single_set (prev_insn)) != 0
6412      && SET_DEST (tem) == cc0_rtx
6413      && ! reg_mentioned_p (cc0_rtx, x))
6414    delete_insn (prev_insn);
6415
6416  prev_insn_cc0 = this_insn_cc0;
6417  prev_insn_cc0_mode = this_insn_cc0_mode;
6418  prev_insn = insn;
6419#endif
6420}
6421
6422/* Remove from the hash table all expressions that reference memory.  */
6423
6424static void
6425invalidate_memory (void)
6426{
6427  int i;
6428  struct table_elt *p, *next;
6429
6430  for (i = 0; i < HASH_SIZE; i++)
6431    for (p = table[i]; p; p = next)
6432      {
6433	next = p->next_same_hash;
6434	if (p->in_memory)
6435	  remove_from_table (p, i);
6436      }
6437}
6438
6439/* If ADDR is an address that implicitly affects the stack pointer, return
6440   1 and update the register tables to show the effect.  Else, return 0.  */
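/* For example, a push such as (mem:SI (pre_dec:SI (reg sp))) modifies the
   stack pointer, so hash entries made with the old value of SP must become
   stale; bumping REG_TICK below accomplishes that.  */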
6441
6442static int
6443addr_affects_sp_p (rtx addr)
6444{
6445  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6446      && REG_P (XEXP (addr, 0))
6447      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6448    {
6449      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6450	{
6451	  REG_TICK (STACK_POINTER_REGNUM)++;
6452	  /* Is it possible to use a subreg of SP?  */
6453	  SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6454	}
6455
6456      /* This should be *very* rare.  */
6457      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6458	invalidate (stack_pointer_rtx, VOIDmode);
6459
6460      return 1;
6461    }
6462
6463  return 0;
6464}
6465
6466/* Perform invalidation on the basis of everything about an insn
6467   except for invalidating the actual places that are SET in it.
6468   This includes the places CLOBBERed, and anything that might
6469   alias with something that is SET or CLOBBERed.
6470
6471   X is the pattern of the insn.  */
6472
6473static void
6474invalidate_from_clobbers (rtx x)
6475{
6476  if (GET_CODE (x) == CLOBBER)
6477    {
6478      rtx ref = XEXP (x, 0);
6479      if (ref)
6480	{
6481	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
6482	      || MEM_P (ref))
6483	    invalidate (ref, VOIDmode);
6484	  else if (GET_CODE (ref) == STRICT_LOW_PART
6485		   || GET_CODE (ref) == ZERO_EXTRACT)
6486	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6487	}
6488    }
6489  else if (GET_CODE (x) == PARALLEL)
6490    {
6491      int i;
6492      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6493	{
6494	  rtx y = XVECEXP (x, 0, i);
6495	  if (GET_CODE (y) == CLOBBER)
6496	    {
6497	      rtx ref = XEXP (y, 0);
6498	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
6499		  || MEM_P (ref))
6500		invalidate (ref, VOIDmode);
6501	      else if (GET_CODE (ref) == STRICT_LOW_PART
6502		       || GET_CODE (ref) == ZERO_EXTRACT)
6503		invalidate (XEXP (ref, 0), GET_MODE (ref));
6504	    }
6505	}
6506    }
6507}
6508
6509/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6510   and replace any registers in them with either an equivalent constant
6511   or the canonical form of the register.  If we are inside an address,
6512   only do this if the address remains valid.
6513
6514   OBJECT is 0 except when within a MEM in which case it is the MEM.
6515
6516   Return the replacement for X.  */
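/* A REG_EQUAL note is an EXPR_LIST, for instance
	(expr_list:REG_EQUAL (plus:SI (reg 60) (const_int 4)) ...)
   which the EXPR_LIST/INSN_LIST case below walks.  */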
6517
6518static rtx
6519cse_process_notes (rtx x, rtx object)
6520{
6521  enum rtx_code code = GET_CODE (x);
6522  const char *fmt = GET_RTX_FORMAT (code);
6523  int i;
6524
6525  switch (code)
6526    {
6527    case CONST_INT:
6528    case CONST:
6529    case SYMBOL_REF:
6530    case LABEL_REF:
6531    case CONST_DOUBLE:
6532    case CONST_VECTOR:
6533    case PC:
6534    case CC0:
6535    case LO_SUM:
6536      return x;
6537
6538    case MEM:
6539      validate_change (x, &XEXP (x, 0),
6540		       cse_process_notes (XEXP (x, 0), x), 0);
6541      return x;
6542
6543    case EXPR_LIST:
6544    case INSN_LIST:
6545      if (REG_NOTE_KIND (x) == REG_EQUAL)
6546	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6547      if (XEXP (x, 1))
6548	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6549      return x;
6550
6551    case SIGN_EXTEND:
6552    case ZERO_EXTEND:
6553    case SUBREG:
6554      {
6555	rtx new = cse_process_notes (XEXP (x, 0), object);
6556	/* We don't substitute VOIDmode constants into these rtx,
6557	   since they would impede folding.  */
6558	if (GET_MODE (new) != VOIDmode)
6559	  validate_change (object, &XEXP (x, 0), new, 0);
6560	return x;
6561      }
6562
6563    case REG:
6564      i = REG_QTY (REGNO (x));
6565
6566      /* Return a constant or a constant register.  */
6567      if (REGNO_QTY_VALID_P (REGNO (x)))
6568	{
6569	  struct qty_table_elem *ent = &qty_table[i];
6570
6571	  if (ent->const_rtx != NULL_RTX
6572	      && (CONSTANT_P (ent->const_rtx)
6573		  || REG_P (ent->const_rtx)))
6574	    {
6575	      rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6576	      if (new)
6577		return new;
6578	    }
6579	}
6580
6581      /* Otherwise, canonicalize this register.  */
6582      return canon_reg (x, NULL_RTX);
6583
6584    default:
6585      break;
6586    }
6587
6588  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6589    if (fmt[i] == 'e')
6590      validate_change (object, &XEXP (x, i),
6591		       cse_process_notes (XEXP (x, i), object), 0);
6592
6593  return x;
6594}
6595
6596/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6597   since they are done elsewhere.  This function is called via note_stores.  */
6598
6599static void
6600invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6601{
6602  enum rtx_code code = GET_CODE (dest);
6603
6604  if (code == MEM
6605      && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6606      /* There are times when an address can appear varying and be a PLUS
6607	 during this scan when it would be a fixed address were we to know
6608	 the proper equivalences.  So invalidate all memory if there is
6609	 a BLKmode or nonscalar memory reference or a reference to a
6610	 variable address.  */
6611      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6612	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6613    {
6614      invalidate_memory ();
6615      return;
6616    }
6617
6618  if (GET_CODE (set) == CLOBBER
6619      || CC0_P (dest)
6620      || dest == pc_rtx)
6621    return;
6622
6623  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6624    invalidate (XEXP (dest, 0), GET_MODE (dest));
6625  else if (code == REG || code == SUBREG || code == MEM)
6626    invalidate (dest, VOIDmode);
6627}
6628
/* Invalidate everything set by the insns from START up to the end of the
   function or the next label.  This is called when we wish to CSE around
   a block that is conditionally executed.  */
6632
6633static void
6634invalidate_skipped_block (rtx start)
6635{
6636  rtx insn;
6637
6638  for (insn = start; insn && !LABEL_P (insn);
6639       insn = NEXT_INSN (insn))
6640    {
6641      if (! INSN_P (insn))
6642	continue;
6643
6644      if (CALL_P (insn))
6645	{
6646	  if (! CONST_OR_PURE_CALL_P (insn))
6647	    invalidate_memory ();
6648	  invalidate_for_call ();
6649	}
6650
6651      invalidate_from_clobbers (PATTERN (insn));
6652      note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6653    }
6654}
6655
/* Find the end of INSN's basic block and record in DATA its range,
   the total number of SETs in all the insns of the block, the last insn
   of the block, and the branch path.
6659
6660   The branch path indicates which branches should be followed.  If a nonzero
6661   path size is specified, the block should be rescanned and a different set
6662   of branches will be taken.  The branch path is only used if
6663   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6664
6665   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6666   used to describe the block.  It is filled in with the information about
6667   the current block.  The incoming structure's branch path, if any, is used
6668   to construct the output branch path.  */
6669
6670static void
6671cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6672			int follow_jumps, int skip_blocks)
6673{
6674  rtx p = insn, q;
6675  int nsets = 0;
6676  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6677  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6678  int path_size = data->path_size;
6679  int path_entry = 0;
6680  int i;
6681
  /* Update the previous branch path, if any.  If the last branch was
     previously taken (PATH_TAKEN or PATH_AROUND), mark it PATH_NOT_TAKEN.
     If it was previously PATH_NOT_TAKEN, shorten the path by one and look
     at the previous branch.  We know that at least one branch must have
     been taken if PATH_SIZE is nonzero.  */
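  /* For example, a path recorded as [TAKEN, TAKEN] is rescanned as
     [TAKEN, NOT_TAKEN], then as [NOT_TAKEN], and finally becomes empty;
     the alternatives are enumerated like a binary countdown.  */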
6687  while (path_size > 0)
6688    {
6689      if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6690	{
6691	  data->path[path_size - 1].status = PATH_NOT_TAKEN;
6692	  break;
6693	}
6694      else
6695	path_size--;
6696    }
6697
  /* If the first instruction is marked with QImode, that means we've
     already processed this block.  Our caller will look at DATA->LAST
     to figure out where to go next.  We want to return the next block
     in the instruction stream, not some branched-to block somewhere
     else.  We accomplish this by pretending our caller forbade us to
     follow jumps or skip blocks.  */
6704  if (GET_MODE (insn) == QImode)
6705    follow_jumps = skip_blocks = 0;
6706
6707  /* Scan to end of this basic block.  */
6708  while (p && !LABEL_P (p))
6709    {
      /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
	 the registers restored by the longjmp come from a later time
	 than the setjmp.  */
6713      if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6714	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6715	break;
6716
6717      /* A PARALLEL can have lots of SETs in it,
6718	 especially if it is really an ASM_OPERANDS.  */
6719      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6720	nsets += XVECLEN (PATTERN (p), 0);
6721      else if (!NOTE_P (p))
6722	nsets += 1;
6723
      /* Ignore insns made by CSE; they cannot affect the boundaries of
	 the basic block.  Such insns have UIDs greater than MAX_UID, so
	 the two tests below skip them.  */
6726
6727      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6728	high_cuid = INSN_CUID (p);
6729      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6730	low_cuid = INSN_CUID (p);
6731
6732      /* See if this insn is in our branch path.  If it is and we are to
6733	 take it, do so.  */
6734      if (path_entry < path_size && data->path[path_entry].branch == p)
6735	{
6736	  if (data->path[path_entry].status != PATH_NOT_TAKEN)
6737	    p = JUMP_LABEL (p);
6738
6739	  /* Point to next entry in path, if any.  */
6740	  path_entry++;
6741	}
6742
6743      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6744	 was specified, we haven't reached our maximum path length, there are
6745	 insns following the target of the jump, this is the only use of the
6746	 jump label, and the target label is preceded by a BARRIER.
6747
6748	 Alternatively, we can follow the jump if it branches around a
6749	 block of code and there are no other branches into the block.
6750	 In this case invalidate_skipped_block will be called to invalidate any
6751	 registers set in the block when following the jump.  */
6752
6753      else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6754	       && JUMP_P (p)
6755	       && GET_CODE (PATTERN (p)) == SET
6756	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6757	       && JUMP_LABEL (p) != 0
6758	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6759	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6760	{
6761	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6762	    if ((!NOTE_P (q)
6763		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6764		 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6765		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6766		&& (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6767	      break;
6768
6769	  /* If we ran into a BARRIER, this code is an extension of the
6770	     basic block when the branch is taken.  */
6771	  if (follow_jumps && q != 0 && BARRIER_P (q))
6772	    {
6773	      /* Don't allow ourself to keep walking around an
6774		 always-executed loop.  */
6775	      if (next_real_insn (q) == next)
6776		{
6777		  p = NEXT_INSN (p);
6778		  continue;
6779		}
6780
6781	      /* Similarly, don't put a branch in our path more than once.  */
6782	      for (i = 0; i < path_entry; i++)
6783		if (data->path[i].branch == p)
6784		  break;
6785
6786	      if (i != path_entry)
6787		break;
6788
6789	      data->path[path_entry].branch = p;
6790	      data->path[path_entry++].status = PATH_TAKEN;
6791
6792	      /* This branch now ends our path.  It was possible that we
6793		 didn't see this branch the last time around (when the
6794		 insn in front of the target was a JUMP_INSN that was
6795		 turned into a no-op).  */
6796	      path_size = path_entry;
6797
6798	      p = JUMP_LABEL (p);
6799	      /* Mark block so we won't scan it again later.  */
6800	      PUT_MODE (NEXT_INSN (p), QImode);
6801	    }
6802	  /* Detect a branch around a block of code.  */
6803	  else if (skip_blocks && q != 0 && !LABEL_P (q))
6804	    {
6805	      rtx tmp;
6806
6807	      if (next_real_insn (q) == next)
6808		{
6809		  p = NEXT_INSN (p);
6810		  continue;
6811		}
6812
6813	      for (i = 0; i < path_entry; i++)
6814		if (data->path[i].branch == p)
6815		  break;
6816
6817	      if (i != path_entry)
6818		break;
6819
6820	      /* This is no_labels_between_p (p, q) with an added check for
6821		 reaching the end of a function (in case Q precedes P).  */
6822	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6823		if (LABEL_P (tmp))
6824		  break;
6825
6826	      if (tmp == q)
6827		{
6828		  data->path[path_entry].branch = p;
6829		  data->path[path_entry++].status = PATH_AROUND;
6830
6831		  path_size = path_entry;
6832
6833		  p = JUMP_LABEL (p);
6834		  /* Mark block so we won't scan it again later.  */
6835		  PUT_MODE (NEXT_INSN (p), QImode);
6836		}
6837	    }
6838	}
6839      p = NEXT_INSN (p);
6840    }
6841
6842  data->low_cuid = low_cuid;
6843  data->high_cuid = high_cuid;
6844  data->nsets = nsets;
6845  data->last = p;
6846
  /* If none of the jumps in the path were taken, set our path length to
     zero so that a rescan won't be done.  */
6849  for (i = path_size - 1; i >= 0; i--)
6850    if (data->path[i].status != PATH_NOT_TAKEN)
6851      break;
6852
6853  if (i == -1)
6854    data->path_size = 0;
6855  else
6856    data->path_size = path_size;
6857
6858  /* End the current branch path.  */
6859  data->path[path_size].branch = 0;
6860}
6861
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.
6865
6866   Returns 1 if jump_optimize should be redone due to simplifications
6867   in conditional jump instructions.  */
6868
6869int
6870cse_main (rtx f, int nregs, FILE *file)
6871{
6872  struct cse_basic_block_data val;
6873  rtx insn = f;
6874  int i;
6875
6876  init_cse_reg_info (nregs);
6877
6878  val.path = xmalloc (sizeof (struct branch_path)
6879		      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6880
6881  cse_jumps_altered = 0;
6882  recorded_label_ref = 0;
6883  constant_pool_entries_cost = 0;
6884  constant_pool_entries_regcost = 0;
6885  val.path_size = 0;
6886  rtl_hooks = cse_rtl_hooks;
6887
6888  init_recog ();
6889  init_alias_analysis ();
6890
6891  reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6892
6893  /* Find the largest uid.  */
6894
6895  max_uid = get_max_uid ();
6896  uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6897
6898  /* Compute the mapping from uids to cuids.
6899     CUIDs are numbers assigned to insns, like uids,
6900     except that cuids increase monotonically through the code.
6901     Don't assign cuids to line-number NOTEs, so that the distance in cuids
6902     between two insns is not affected by -g.  */
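  /* For example, for the sequence "insn, line-number note, insn" the
     cuids assigned are 1, 1, 2; the note shares the cuid of the
     preceding insn.  */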
6903
6904  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6905    {
6906      if (!NOTE_P (insn)
6907	  || NOTE_LINE_NUMBER (insn) < 0)
6908	INSN_CUID (insn) = ++i;
6909      else
6910	/* Give a line number note the same cuid as preceding insn.  */
6911	INSN_CUID (insn) = i;
6912    }
6913
6914  /* Loop over basic blocks.
6915     Compute the maximum number of qty's needed for each basic block
6916     (which is 2 for each SET).  */
6917  insn = f;
6918  while (insn)
6919    {
6920      cse_altered = 0;
6921      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6922			      flag_cse_skip_blocks);
6923
6924      /* If this basic block was already processed or has no sets, skip it.  */
6925      if (val.nsets == 0 || GET_MODE (insn) == QImode)
6926	{
6927	  PUT_MODE (insn, VOIDmode);
6928	  insn = (val.last ? NEXT_INSN (val.last) : 0);
6929	  val.path_size = 0;
6930	  continue;
6931	}
6932
6933      cse_basic_block_start = val.low_cuid;
6934      cse_basic_block_end = val.high_cuid;
6935      max_qty = val.nsets * 2;
6936
6937      if (file)
6938	fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
6939		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6940		 val.nsets);
6941
6942      /* Make MAX_QTY bigger to give us room to optimize
6943	 past the end of this basic block, if that should prove useful.  */
6944      if (max_qty < 500)
6945	max_qty = 500;
6946
      /* If this basic block is being extended by following certain jumps
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
         Otherwise, we start after this basic block.  */
6950      if (val.path_size > 0)
6951	cse_basic_block (insn, val.last, val.path);
6952      else
6953	{
6954	  int old_cse_jumps_altered = cse_jumps_altered;
6955	  rtx temp;
6956
6957	  /* When cse changes a conditional jump to an unconditional
6958	     jump, we want to reprocess the block, since it will give
6959	     us a new branch path to investigate.  */
6960	  cse_jumps_altered = 0;
6961	  temp = cse_basic_block (insn, val.last, val.path);
6962	  if (cse_jumps_altered == 0
6963	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6964	    insn = temp;
6965
6966	  cse_jumps_altered |= old_cse_jumps_altered;
6967	}
6968
6969      if (cse_altered)
6970	ggc_collect ();
6971
6972#ifdef USE_C_ALLOCA
6973      alloca (0);
6974#endif
6975    }
6976
6977  /* Clean up.  */
6978  end_alias_analysis ();
6979  free (uid_cuid);
6980  free (reg_eqv_table);
6981  free (val.path);
6982  rtl_hooks = general_rtl_hooks;
6983
6984  return cse_jumps_altered || recorded_label_ref;
6985}
6986
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
6989   a null path when not following jumps.  */
6990
6991static rtx
6992cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6993{
6994  rtx insn;
6995  int to_usage = 0;
6996  rtx libcall_insn = NULL_RTX;
6997  int num_insns = 0;
6998  int no_conflict = 0;
6999
7000  /* Allocate the space needed by qty_table.  */
7001  qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
7002
7003  new_basic_block ();
7004
7005  /* TO might be a label.  If so, protect it from being deleted.  */
7006  if (to != 0 && LABEL_P (to))
7007    ++LABEL_NUSES (to);
7008
7009  for (insn = from; insn != to; insn = NEXT_INSN (insn))
7010    {
7011      enum rtx_code code = GET_CODE (insn);
7012
      /* If we have processed PARAM_MAX_CSE_INSNS insns (1,000 by default),
	 flush the hash table to avoid extreme quadratic behavior.  We must
	 not include NOTEs in the count since there may be more of them
	 when generating debugging information.  If we cleared the table at
	 different times, code generated with -g -O might differ from code
	 generated with -O but not -g.

	 ??? This is a real kludge and needs to be done some other way.
	 Perhaps for 2.9.  */
7022      if (code != NOTE && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
7023	{
7024	  flush_hash_table ();
7025	  num_insns = 0;
7026	}
7027
7028      /* See if this is a branch that is part of the path.  If so, and it is
7029	 to be taken, do so.  */
7030      if (next_branch->branch == insn)
7031	{
7032	  enum taken status = next_branch++->status;
7033	  if (status != PATH_NOT_TAKEN)
7034	    {
7035	      if (status == PATH_TAKEN)
7036		record_jump_equiv (insn, 1);
7037	      else
7038		invalidate_skipped_block (NEXT_INSN (insn));
7039
7040	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7041		 Then follow this branch.  */
7042#ifdef HAVE_cc0
7043	      prev_insn_cc0 = 0;
7044	      prev_insn = insn;
7045#endif
7046	      insn = JUMP_LABEL (insn);
7047	      continue;
7048	    }
7049	}
7050
7051      if (GET_MODE (insn) == QImode)
7052	PUT_MODE (insn, VOIDmode);
7053
7054      if (GET_RTX_CLASS (code) == RTX_INSN)
7055	{
7056	  rtx p;
7057
7058	  /* Process notes first so we have all notes in canonical forms when
7059	     looking for duplicate operations.  */
7060
7061	  if (REG_NOTES (insn))
7062	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7063
	  /* Track when we are inside a LIBCALL block.  Inside such a block,
7065	     we do not want to record destinations.  The last insn of a
7066	     LIBCALL block is not considered to be part of the block, since
7067	     its destination is the result of the block and hence should be
7068	     recorded.  */
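	  /* A LIBCALL block is delimited by notes: the first insn of the
	     block carries a REG_LIBCALL note pointing at the last insn,
	     and the last insn carries a REG_RETVAL note pointing back at
	     the first.  */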
7069
7070	  if (REG_NOTES (insn) != 0)
7071	    {
7072	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7073		libcall_insn = XEXP (p, 0);
7074	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7075		{
7076		  /* Keep libcall_insn for the last SET insn of a no-conflict
7077		     block to prevent changing the destination.  */
7078		  if (! no_conflict)
7079		    libcall_insn = 0;
7080		  else
7081		    no_conflict = -1;
7082		}
7083	      else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7084		no_conflict = 1;
7085	    }
7086
7087	  cse_insn (insn, libcall_insn);
7088
7089	  if (no_conflict == -1)
7090	    {
7091	      libcall_insn = 0;
7092	      no_conflict = 0;
7093	    }
7094
7095	  /* If we haven't already found an insn where we added a LABEL_REF,
7096	     check this one.  */
7097	  if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
7098	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7099			       (void *) insn))
7100	    recorded_label_ref = 1;
7101	}
7102
7103      /* If INSN is now an unconditional jump, skip to the end of our
7104	 basic block by pretending that we just did the last insn in the
7105	 basic block.  If we are jumping to the end of our block, show
7106	 that we can have one usage of TO.  */
7107
7108      if (any_uncondjump_p (insn))
7109	{
7110	  if (to == 0)
7111	    {
7112	      free (qty_table);
7113	      return 0;
7114	    }
7115
7116	  if (JUMP_LABEL (insn) == to)
7117	    to_usage = 1;
7118
7119	  /* Maybe TO was deleted because the jump is unconditional.
7120	     If so, there is nothing left in this basic block.  */
7121	  /* ??? Perhaps it would be smarter to set TO
7122	     to whatever follows this insn,
7123	     and pretend the basic block had always ended here.  */
7124	  if (INSN_DELETED_P (to))
7125	    break;
7126
7127	  insn = PREV_INSN (to);
7128	}
7129
7130      /* See if it is ok to keep on going past the label
7131	 which used to end our basic block.  Remember that we incremented
7132	 the count of that label, so we decrement it here.  If we made
7133	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7134	 want to count the use in that jump.  */
7135
7136      if (to != 0 && NEXT_INSN (insn) == to
7137	  && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7138	{
7139	  struct cse_basic_block_data val;
7140	  rtx prev;
7141
7142	  insn = NEXT_INSN (to);
7143
7144	  /* If TO was the last insn in the function, we are done.  */
7145	  if (insn == 0)
7146	    {
7147	      free (qty_table);
7148	      return 0;
7149	    }
7150
7151	  /* If TO was preceded by a BARRIER we are done with this block
7152	     because it has no continuation.  */
7153	  prev = prev_nonnote_insn (to);
7154	  if (prev && BARRIER_P (prev))
7155	    {
7156	      free (qty_table);
7157	      return insn;
7158	    }
7159
7160	  /* Find the end of the following block.  Note that we won't be
7161	     following branches in this case.  */
7162	  to_usage = 0;
7163	  val.path_size = 0;
7164	  val.path = xmalloc (sizeof (struct branch_path)
7165			      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7166	  cse_end_of_basic_block (insn, &val, 0, 0);
7167	  free (val.path);
7168
7169	  /* If the tables we allocated have enough space left
7170	     to handle all the SETs in the next basic block,
7171	     continue through it.  Otherwise, return,
7172	     and that block will be scanned individually.  */
7173	  if (val.nsets * 2 + next_qty > max_qty)
7174	    break;
7175
7176	  cse_basic_block_start = val.low_cuid;
7177	  cse_basic_block_end = val.high_cuid;
7178	  to = val.last;
7179
7180	  /* Prevent TO from being deleted if it is a label.  */
7181	  if (to != 0 && LABEL_P (to))
7182	    ++LABEL_NUSES (to);
7183
7184	  /* Back up so we process the first insn in the extension.  */
7185	  insn = PREV_INSN (insn);
7186	}
7187    }
7188
7189  gcc_assert (next_qty <= max_qty);
7190
7191  free (qty_table);
7192
7193  return to ? NEXT_INSN (to) : 0;
7194}
7195
7196/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7197   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7198
7199static int
7200check_for_label_ref (rtx *rtl, void *data)
7201{
7202  rtx insn = (rtx) data;
7203
7204  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7205     we must rerun jump since it needs to place the note.  If this is a
7206     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7207     since no REG_LABEL will be added.  */
7208  return (GET_CODE (*rtl) == LABEL_REF
7209	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7210	  && LABEL_P (XEXP (*rtl, 0))
7211	  && INSN_UID (XEXP (*rtl, 0)) != 0
7212	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7213}
7214
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count; INCR is the
   amount by which to increment the count for each usage.
7218
7219   Don't count a usage of DEST, which is the SET_DEST of a SET which
7220   contains X in its SET_SRC.  This is because such a SET does not
7221   modify the liveness of DEST.
7222   DEST is set to pc_rtx for a trapping insn, which means that we must count
7223   uses of a SET_DEST regardless because the insn can't be deleted here.  */
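/* As an example, in (set (reg 60) (plus (reg 60) (const_int 1))) the use
   of (reg 60) in the source is not counted: if this insn is the only
   user of (reg 60), that use disappears when the insn is deleted, so it
   must not keep the register (or the insn) alive.  */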
7224
7225static void
7226count_reg_usage (rtx x, int *counts, rtx dest, int incr)
7227{
7228  enum rtx_code code;
7229  rtx note;
7230  const char *fmt;
7231  int i, j;
7232
7233  if (x == 0)
7234    return;
7235
7236  switch (code = GET_CODE (x))
7237    {
7238    case REG:
7239      if (x != dest)
7240	counts[REGNO (x)] += incr;
7241      return;
7242
7243    case PC:
7244    case CC0:
7245    case CONST:
7246    case CONST_INT:
7247    case CONST_DOUBLE:
7248    case CONST_VECTOR:
7249    case SYMBOL_REF:
7250    case LABEL_REF:
7251      return;
7252
7253    case CLOBBER:
7254      /* If we are clobbering a MEM, mark any registers inside the address
7255         as being used.  */
7256      if (MEM_P (XEXP (x, 0)))
7257	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7258      return;
7259
7260    case SET:
7261      /* Unless we are setting a REG, count everything in SET_DEST.  */
7262      if (!REG_P (SET_DEST (x)))
7263	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7264      count_reg_usage (SET_SRC (x), counts,
7265		       dest ? dest : SET_DEST (x),
7266		       incr);
7267      return;
7268
7269    case CALL_INSN:
7270    case INSN:
7271    case JUMP_INSN:
      /* We expect DEST to be NULL_RTX here.  If the insn may trap,
	 mark this fact by setting DEST to pc_rtx.  */
7274      if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
7275	dest = pc_rtx;
7276      if (code == CALL_INSN)
7277	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
7278      count_reg_usage (PATTERN (x), counts, dest, incr);
7279
      /* Things used in a REG_EQUAL note aren't dead since the loop
	 optimizer may try to use them.  */
7282
7283      note = find_reg_equal_equiv_note (x);
7284      if (note)
7285	{
7286	  rtx eqv = XEXP (note, 0);
7287
7288	  if (GET_CODE (eqv) == EXPR_LIST)
7289	  /* This REG_EQUAL note describes the result of a function call.
7290	     Process all the arguments.  */
7291	    do
7292	      {
7293		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
7294		eqv = XEXP (eqv, 1);
7295	      }
7296	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
7297	  else
7298	    count_reg_usage (eqv, counts, dest, incr);
7299	}
7300      return;
7301
7302    case EXPR_LIST:
7303      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
7305	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7306	     involving registers in the address.  */
7307	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
7308	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7309
7310      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7311      return;
7312
7313    case ASM_OPERANDS:
7314      /* If the asm is volatile, then this insn cannot be deleted,
7315	 and so the inputs *must* be live.  */
7316      if (MEM_VOLATILE_P (x))
7317	dest = NULL_RTX;
7318      /* Iterate over just the inputs, not the constraints as well.  */
7319      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7320	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
7321      return;
7322
7323    case INSN_LIST:
7324      gcc_unreachable ();
7325
7326    default:
7327      break;
7328    }
7329
7330  fmt = GET_RTX_FORMAT (code);
7331  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7332    {
7333      if (fmt[i] == 'e')
7334	count_reg_usage (XEXP (x, i), counts, dest, incr);
7335      else if (fmt[i] == 'E')
7336	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7337	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7338    }
7339}
7340
/* Return true if SET is live.  */
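/* For instance, a no-op move such as (set (reg 60) (reg 60)) is never
   live, and a SET of an otherwise unused pseudo register is dead unless
   its source has side effects.  */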
7342static bool
7343set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
7344	    int *counts)
7345{
7346#ifdef HAVE_cc0
7347  rtx tem;
7348#endif
7349
7350  if (set_noop_p (set))
7351    ;
7352
7353#ifdef HAVE_cc0
7354  else if (GET_CODE (SET_DEST (set)) == CC0
7355	   && !side_effects_p (SET_SRC (set))
7356	   && ((tem = next_nonnote_insn (insn)) == 0
7357	       || !INSN_P (tem)
7358	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7359    return false;
7360#endif
7361  else if (!REG_P (SET_DEST (set))
7362	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7363	   || counts[REGNO (SET_DEST (set))] != 0
7364	   || side_effects_p (SET_SRC (set)))
7365    return true;
7366  return false;
7367}
7368
/* Return true if INSN is live.  */
7370
7371static bool
7372insn_live_p (rtx insn, int *counts)
7373{
7374  int i;
7375  if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7376    return true;
7377  else if (GET_CODE (PATTERN (insn)) == SET)
7378    return set_live_p (PATTERN (insn), insn, counts);
7379  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7380    {
7381      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7382	{
7383	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7384
7385	  if (GET_CODE (elt) == SET)
7386	    {
7387	      if (set_live_p (elt, insn, counts))
7388		return true;
7389	    }
7390	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7391	    return true;
7392	}
7393      return false;
7394    }
7395  else
7396    return true;
7397}
7398
/* Return true if the libcall block ending with INSN is dead as a whole,
   i.e. if INSN can be made independent of the rest of the block by
   substituting the value from its REG_EQUAL note.  */
7400
7401static bool
7402dead_libcall_p (rtx insn, int *counts)
7403{
7404  rtx note, set, new;
7405
7406  /* See if there's a REG_EQUAL note on this insn and try to
7407     replace the source with the REG_EQUAL expression.
7408
7409     We assume that insns with REG_RETVALs can only be reg->reg
7410     copies at this point.  */
7411  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7412  if (!note)
7413    return false;
7414
7415  set = single_set (insn);
7416  if (!set)
7417    return false;
7418
7419  new = simplify_rtx (XEXP (note, 0));
7420  if (!new)
7421    new = XEXP (note, 0);
7422
7423  /* While changing insn, we must update the counts accordingly.  */
7424  count_reg_usage (insn, counts, NULL_RTX, -1);
7425
7426  if (validate_change (insn, &SET_SRC (set), new, 0))
7427    {
7428      count_reg_usage (insn, counts, NULL_RTX, 1);
7429      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7430      remove_note (insn, note);
7431      return true;
7432    }
7433
7434  if (CONSTANT_P (new))
7435    {
7436      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7437      if (new && validate_change (insn, &SET_SRC (set), new, 0))
7438	{
7439	  count_reg_usage (insn, counts, NULL_RTX, 1);
7440	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7441	  remove_note (insn, note);
7442	  return true;
7443	}
7444    }
7445
7446  count_reg_usage (insn, counts, NULL_RTX, 1);
7447  return false;
7448}
7449
7450/* Scan all the insns and delete any that are dead; i.e., they store a register
7451   that is never used or they copy a register to itself.
7452
7453   This is used to remove insns made obviously dead by cse, loop or other
7454   optimizations.  It improves the heuristics in loop since it won't try to
7455   move dead invariants out of loops or make givs for dead quantities.  The
7456   remaining passes of the compilation are also sped up.  */
7457
7458int
7459delete_trivially_dead_insns (rtx insns, int nreg)
7460{
7461  int *counts;
7462  rtx insn, prev;
7463  int in_libcall = 0, dead_libcall = 0;
7464  int ndead = 0;
7465
7466  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7467  /* First count the number of times each register is used.  */
7468  counts = xcalloc (nreg, sizeof (int));
7469  for (insn = insns; insn; insn = NEXT_INSN (insn))
7470    if (INSN_P (insn))
7471      count_reg_usage (insn, counts, NULL_RTX, 1);
7472
7473  /* Go from the last insn to the first and delete insns that only set unused
7474     registers or copy a register to itself.  As we delete an insn, remove
7475     usage counts for registers it uses.
7476
     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
7480  for (insn = get_last_insn (); insn; insn = prev)
7481    {
7482      int live_insn = 0;
7483
7484      prev = PREV_INSN (insn);
7485      if (!INSN_P (insn))
7486	continue;
7487
7488      /* Don't delete any insns that are part of a libcall block unless
7489	 we can delete the whole libcall block.
7490
7491	 Flow or loop might get confused if we did that.  Remember
7492	 that we are scanning backwards.  */
7493      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7494	{
7495	  in_libcall = 1;
7496	  live_insn = 1;
7497	  dead_libcall = dead_libcall_p (insn, counts);
7498	}
7499      else if (in_libcall)
7500	live_insn = ! dead_libcall;
7501      else
7502	live_insn = insn_live_p (insn, counts);
7503
7504      /* If this is a dead insn, delete it and show registers in it aren't
7505	 being used.  */
7506
7507      if (! live_insn)
7508	{
7509	  count_reg_usage (insn, counts, NULL_RTX, -1);
7510	  delete_insn_and_edges (insn);
7511	  ndead++;
7512	}
7513
7514      if (in_libcall && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7515	{
7516	  in_libcall = 0;
7517	  dead_libcall = 0;
7518	}
7519    }
7520
7521  if (dump_file && ndead)
7522    fprintf (dump_file, "Deleted %i trivially dead insns\n",
7523	     ndead);
7524  /* Clean up.  */
7525  free (counts);
7526  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7527  return ndead;
7528}
7529
7530/* This function is called via for_each_rtx.  The argument, NEWREG, is
7531   a condition code register with the desired mode.  If we are looking
7532   at the same register in a different mode, replace it with
7533   NEWREG.  */
7534
7535static int
7536cse_change_cc_mode (rtx *loc, void *data)
7537{
7538  struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7539
7540  if (*loc
7541      && REG_P (*loc)
7542      && REGNO (*loc) == REGNO (args->newreg)
7543      && GET_MODE (*loc) != GET_MODE (args->newreg))
7544    {
7545      validate_change (args->insn, loc, args->newreg, 1);
7546
7547      return -1;
7548    }
7549  return 0;
7550}
7551
7552/* Change the mode of any reference to the register REGNO (NEWREG) to
7553   GET_MODE (NEWREG) in INSN.  */
7554
7555static void
7556cse_change_cc_mode_insn (rtx insn, rtx newreg)
7557{
7558  struct change_cc_mode_args args;
7559  int success;
7560
7561  if (!INSN_P (insn))
7562    return;
7563
7564  args.insn = insn;
7565  args.newreg = newreg;
7566
7567  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7568  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7569
  /* If the following assertion triggers, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes can only be considered compatible if the insn - with the
     mode replaced by any of the compatible modes - can still be
     recognized.  */
7574  success = apply_change_group ();
7575  gcc_assert (success);
7576}
7577
7578/* Change the mode of any reference to the register REGNO (NEWREG) to
7579   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7580   any instruction which modifies NEWREG.  */
7581
7582static void
7583cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7584{
7585  rtx insn;
7586
7587  for (insn = start; insn != end; insn = NEXT_INSN (insn))
7588    {
7589      if (! INSN_P (insn))
7590	continue;
7591
7592      if (reg_set_p (newreg, insn))
7593	return;
7594
7595      cse_change_cc_mode_insn (insn, newreg);
7596    }
7597}
7598
7599/* BB is a basic block which finishes with CC_REG as a condition code
7600   register which is set to CC_SRC.  Look through the successors of BB
7601   to find blocks which have a single predecessor (i.e., this one),
7602   and look through those blocks for an assignment to CC_REG which is
7603   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7604   permitted to change the mode of CC_SRC to a compatible mode.  This
7605   returns VOIDmode if no equivalent assignments were found.
7606   Otherwise it returns the mode which CC_SRC should wind up with.
7607
7608   The main complexity in this function is handling the mode issues.
7609   We may have more than one duplicate which we can eliminate, and we
7610   try to find a mode which will work for multiple duplicates.  */
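/* For instance (machine-dependent, illustrative): one duplicate may
   compute (compare (reg 60) (const_int 0)) in a mode recording only the
   zero flag while another user needs more flags; if the target's
   cc_modes_compatible hook names a mode valid for both, the duplicates
   can still be eliminated after switching to that mode.  */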
7611
7612static enum machine_mode
7613cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7614{
7615  bool found_equiv;
7616  enum machine_mode mode;
7617  unsigned int insn_count;
7618  edge e;
7619  rtx insns[2];
7620  enum machine_mode modes[2];
7621  rtx last_insns[2];
7622  unsigned int i;
7623  rtx newreg;
7624  edge_iterator ei;
7625
7626  /* We expect to have two successors.  Look at both before picking
7627     the final mode for the comparison.  If we have more successors
7628     (i.e., some sort of table jump, although that seems unlikely),
7629     then we require all beyond the first two to use the same
7630     mode.  */
7631
7632  found_equiv = false;
7633  mode = GET_MODE (cc_src);
7634  insn_count = 0;
7635  FOR_EACH_EDGE (e, ei, bb->succs)
7636    {
7637      rtx insn;
7638      rtx end;
7639
7640      if (e->flags & EDGE_COMPLEX)
7641	continue;
7642
7643      if (EDGE_COUNT (e->dest->preds) != 1
7644	  || e->dest == EXIT_BLOCK_PTR)
7645	continue;
7646
7647      end = NEXT_INSN (BB_END (e->dest));
7648      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7649	{
7650	  rtx set;
7651
7652	  if (! INSN_P (insn))
7653	    continue;
7654
7655	  /* If CC_SRC is modified, we have to stop looking for
7656	     something which uses it.  */
7657	  if (modified_in_p (cc_src, insn))
7658	    break;
7659
7660	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7661	  set = single_set (insn);
7662	  if (set
7663	      && REG_P (SET_DEST (set))
7664	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7665	    {
7666	      bool found;
7667	      enum machine_mode set_mode;
7668	      enum machine_mode comp_mode;
7669
7670	      found = false;
7671	      set_mode = GET_MODE (SET_SRC (set));
7672	      comp_mode = set_mode;
7673	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7674		found = true;
7675	      else if (GET_CODE (cc_src) == COMPARE
7676		       && GET_CODE (SET_SRC (set)) == COMPARE
7677		       && mode != set_mode
7678		       && rtx_equal_p (XEXP (cc_src, 0),
7679				       XEXP (SET_SRC (set), 0))
7680		       && rtx_equal_p (XEXP (cc_src, 1),
7681				       XEXP (SET_SRC (set), 1)))
7682
7683		{
7684		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7685		  if (comp_mode != VOIDmode
7686		      && (can_change_mode || comp_mode == mode))
7687		    found = true;
7688		}
7689
7690	      if (found)
7691		{
7692		  found_equiv = true;
7693		  if (insn_count < ARRAY_SIZE (insns))
7694		    {
7695		      insns[insn_count] = insn;
7696		      modes[insn_count] = set_mode;
7697		      last_insns[insn_count] = end;
7698		      ++insn_count;
7699
7700		      if (mode != comp_mode)
7701			{
7702			  gcc_assert (can_change_mode);
7703			  mode = comp_mode;
7704
7705			  /* The modified insn will be re-recognized later.  */
7706			  PUT_MODE (cc_src, mode);
7707			}
7708		    }
7709		  else
7710		    {
7711		      if (set_mode != mode)
7712			{
7713			  /* We found a matching expression in the
7714			     wrong mode, but we don't have room to
7715			     store it in the array.  Punt.  This case
7716			     should be rare.  */
7717			  break;
7718			}
7719		      /* INSN sets CC_REG to a value equal to CC_SRC
7720			 with the right mode.  We can simply delete
7721			 it.  */
7722		      delete_insn (insn);
7723		    }
7724
7725		  /* We found an instruction to delete.  Keep looking,
7726		     in the hopes of finding a three-way jump.  */
7727		  continue;
7728		}
7729
7730	      /* We found an instruction which sets the condition
7731		 code, so don't look any farther.  */
7732	      break;
7733	    }
7734
7735	  /* If INSN sets CC_REG in some other way, don't look any
7736	     farther.  */
7737	  if (reg_set_p (cc_reg, insn))
7738	    break;
7739	}
7740
7741      /* If we fell off the bottom of the block, we can keep looking
7742	 through successors.  We pass CAN_CHANGE_MODE as false because
7743	 we aren't prepared to handle compatibility between the
7744	 further blocks and this block.  */
7745      if (insn == end)
7746	{
7747	  enum machine_mode submode;
7748
7749	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7750	  if (submode != VOIDmode)
7751	    {
7752	      gcc_assert (submode == mode);
7753	      found_equiv = true;
7754	      can_change_mode = false;
7755	    }
7756	}
7757    }
7758
7759  if (! found_equiv)
7760    return VOIDmode;
7761
7762  /* Now INSN_COUNT is the number of instructions we found which set
7763     CC_REG to a value equivalent to CC_SRC.  The instructions are in
7764     INSNS.  The modes used by those instructions are in MODES.  */
7765
7766  newreg = NULL_RTX;
7767  for (i = 0; i < insn_count; ++i)
7768    {
7769      if (modes[i] != mode)
7770	{
7771	  /* We need to change the mode of CC_REG in INSNS[i] and
7772	     subsequent instructions.  */
7773	  if (! newreg)
7774	    {
7775	      if (GET_MODE (cc_reg) == mode)
7776		newreg = cc_reg;
7777	      else
7778		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7779	    }
7780	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7781				    newreg);
7782	}
7783
7784      delete_insn (insns[i]);
7785    }
7786
7787  return mode;
7788}
7789
7790/* If we have a fixed condition code register (or two), walk through
7791   the instructions and try to eliminate duplicate assignments.  */
7792
7793void
7794cse_condition_code_reg (void)
7795{
7796  unsigned int cc_regno_1;
7797  unsigned int cc_regno_2;
7798  rtx cc_reg_1;
7799  rtx cc_reg_2;
7800  basic_block bb;
7801
7802  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7803    return;
7804
7805  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7806  if (cc_regno_2 != INVALID_REGNUM)
7807    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7808  else
7809    cc_reg_2 = NULL_RTX;
7810
7811  FOR_EACH_BB (bb)
7812    {
7813      rtx last_insn;
7814      rtx cc_reg;
7815      rtx insn;
7816      rtx cc_src_insn;
7817      rtx cc_src;
7818      enum machine_mode mode;
7819      enum machine_mode orig_mode;
7820
7821      /* Look for blocks which end with a conditional jump based on a
7822	 condition code register.  Then look for the instruction which
7823	 sets the condition code register.  Then look through the
7824	 successor blocks for instructions which set the condition
7825	 code register to the same value.  There are other possible
7826	 uses of the condition code register, but these are by far the
7827	 most common and the ones which we are most likely to be able
7828	 to optimize.  */
7829
7830      last_insn = BB_END (bb);
7831      if (!JUMP_P (last_insn))
7832	continue;
7833
7834      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7835	cc_reg = cc_reg_1;
7836      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7837	cc_reg = cc_reg_2;
7838      else
7839	continue;
7840
7841      cc_src_insn = NULL_RTX;
7842      cc_src = NULL_RTX;
7843      for (insn = PREV_INSN (last_insn);
7844	   insn && insn != PREV_INSN (BB_HEAD (bb));
7845	   insn = PREV_INSN (insn))
7846	{
7847	  rtx set;
7848
7849	  if (! INSN_P (insn))
7850	    continue;
7851	  set = single_set (insn);
7852	  if (set
7853	      && REG_P (SET_DEST (set))
7854	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7855	    {
7856	      cc_src_insn = insn;
7857	      cc_src = SET_SRC (set);
7858	      break;
7859	    }
7860	  else if (reg_set_p (cc_reg, insn))
7861	    break;
7862	}
7863
7864      if (! cc_src_insn)
7865	continue;
7866
7867      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7868	continue;
7869
7870      /* Now CC_REG is a condition code register used for a
7871	 conditional jump at the end of the block, and CC_SRC, in
7872	 CC_SRC_INSN, is the value to which that condition code
7873	 register is set, and CC_SRC is still meaningful at the end of
7874	 the basic block.  */
7875
7876      orig_mode = GET_MODE (cc_src);
7877      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7878      if (mode != VOIDmode)
7879	{
7880	  gcc_assert (mode == GET_MODE (cc_src));
7881	  if (mode != orig_mode)
7882	    {
7883	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7884
7885	      cse_change_cc_mode_insn (cc_src_insn, newreg);
7886
7887	      /* Do the same in the following insns that use the
7888		 current value of CC_REG within BB.  */
7889	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7890					NEXT_INSN (last_insn),
7891					newreg);
7892	    }
7893	}
7894    }
7895}
7896
7897
7898/* Perform common subexpression elimination.  Nonzero value from
7899   `cse_main' means that jumps were simplified and some code may now
7900   be unreachable, so do jump optimization again.  */
7901static bool
7902gate_handle_cse (void)
7903{
7904  return optimize > 0;
7905}
7906
7907static void
7908rest_of_handle_cse (void)
7909{
7910  int tem;
7911
7912  if (dump_file)
7913    dump_flow_info (dump_file);
7914
7915  reg_scan (get_insns (), max_reg_num ());
7916
7917  tem = cse_main (get_insns (), max_reg_num (), dump_file);
7918  if (tem)
7919    rebuild_jump_labels (get_insns ());
7920  if (purge_all_dead_edges ())
7921    delete_unreachable_blocks ();
7922
7923  delete_trivially_dead_insns (get_insns (), max_reg_num ());
7924
7925  /* If we are not running more CSE passes, then we are no longer
7926     expecting CSE to be run.  But always rerun it in a cheap mode.  */
7927  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7928
7929  if (tem)
7930    delete_dead_jumptables ();
7931
7932  if (tem || optimize > 1)
7933    cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
7934}
7935
7936struct tree_opt_pass pass_cse =
7937{
7938  "cse1",                               /* name */
7939  gate_handle_cse,                      /* gate */
7940  rest_of_handle_cse,			/* execute */
7941  NULL,                                 /* sub */
7942  NULL,                                 /* next */
7943  0,                                    /* static_pass_number */
7944  TV_CSE,                               /* tv_id */
7945  0,                                    /* properties_required */
7946  0,                                    /* properties_provided */
7947  0,                                    /* properties_destroyed */
7948  0,                                    /* todo_flags_start */
7949  TODO_dump_func |
7950  TODO_ggc_collect,                     /* todo_flags_finish */
7951  's'                                   /* letter */
7952};
7953
7954
7955static bool
7956gate_handle_cse2 (void)
7957{
7958  return optimize > 0 && flag_rerun_cse_after_loop;
7959}
7960
7961/* Run second CSE pass after loop optimizations.  */
7962static void
7963rest_of_handle_cse2 (void)
7964{
7965  int tem;
7966
7967  if (dump_file)
7968    dump_flow_info (dump_file);
7969
7970  tem = cse_main (get_insns (), max_reg_num (), dump_file);
7971
7972  /* Run a pass to eliminate duplicated assignments to condition code
7973     registers.  We have to run this after bypass_jumps, because it
7974     makes it harder for that pass to determine whether a jump can be
7975     bypassed safely.  */
7976  cse_condition_code_reg ();
7977
7978  purge_all_dead_edges ();
7979  delete_trivially_dead_insns (get_insns (), max_reg_num ());
7980
7981  if (tem)
7982    {
7983      timevar_push (TV_JUMP);
7984      rebuild_jump_labels (get_insns ());
7985      delete_dead_jumptables ();
7986      cleanup_cfg (CLEANUP_EXPENSIVE);
7987      timevar_pop (TV_JUMP);
7988    }
7989  reg_scan (get_insns (), max_reg_num ());
7990  cse_not_expected = 1;
7991}
7992
7993
7994struct tree_opt_pass pass_cse2 =
7995{
7996  "cse2",                               /* name */
7997  gate_handle_cse2,                     /* gate */
7998  rest_of_handle_cse2,			/* execute */
7999  NULL,                                 /* sub */
8000  NULL,                                 /* next */
8001  0,                                    /* static_pass_number */
8002  TV_CSE2,                              /* tv_id */
8003  0,                                    /* properties_required */
8004  0,                                    /* properties_provided */
8005  0,                                    /* properties_destroyed */
8006  0,                                    /* todo_flags_start */
8007  TODO_dump_func |
8008  TODO_ggc_collect,                     /* todo_flags_finish */
8009  't'                                   /* letter */
8010};
8011
8012