/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "ggc.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
#include "pointer-set.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.
Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.
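
   For example (an illustrative sketch; the register numbers are made
   up): suppose 100 and 101 are pseudos.  After

     (set (reg:SI 101) (reg:SI 100))

   REG_QTY (101) is copied from REG_QTY (100), so the two registers
   share one quantity.  A later

     (set (reg:SI 101) (plus:SI (reg:SI 101) (const_int 4)))

   loads register 101 some other way, so a fresh quantity is allocated
   for it and the two registers cease to be equivalent.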

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

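   For instance (the register numbers are invented for the example):
   after

     (set (reg:SI 100) (const_int 40))

   the constant 40 sits in the hash table with mode SImode; a later

     (set (reg:DI 101) (const_int 40))

   enters the same (const_int 40) again as a separate element with mode
   DImode.
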
Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match causes the entries to be
   ignored if anyone tries to match them.

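   As an example (the register number and tick values are invented):
   a reference to pseudo 100 is entered in the table while
   REG_TICK (100) is 3, so REG_IN_TABLE (100) becomes 3.  A later store
   to the pseudo bumps REG_TICK (100) to 4; nothing is scanned at that
   point, but the mismatch 3 != 4 already keeps the stale entries from
   matching.  Only when a new expression referring to pseudo 100 is
   about to be entered do we sweep out the old references and set
   REG_IN_TABLE (100) to 4.
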
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
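
/* As an example of related expressions (the register number is
   invented): when (plus (reg:SI 100) (const_int 12)) is entered in the
   table, the related expression (reg:SI 100), which has no constant
   term, is entered as well, and the two elements point at each other
   through `related_value'.  Given either one, any register equivalent
   to the other can then be found.  */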

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
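
/* For example (the register number is invented): on a path where
   (gt (reg:SI 100) (const_int 7)) is known to have been true, the
   quantity of register 100 can record comparison_code GT,
   comparison_const (const_int 7) and comparison_qty -1; if the same
   comparison is seen again, its result is already known.  */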

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
#endif

/* Insn being scanned.  */

static rtx this_insn;
static bool optimize_this_for_speed_p;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; we then have to rerun jump after CSE
   to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
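
/* With HASH_SHIFT 5 there are only 1 << 5 == 32 buckets and HASH_MASK
   is 0x1f, so for a pseudo register the bucket index reduces to
   ((unsigned) REG << 7) + REG_QTY (regno), masked down to the low five
   bits.  */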

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P(N)							\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code, int);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    enum machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, bool);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx);
static void invalidate_from_sets_and_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
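
/* For example, fixed_base_plus_p accepts (reg fp) and nested sums such
   as (plus (plus (reg fp) (const_int 8)) (const_int 4)), but rejects
   (plus (reg fp) (reg 100)) because the second operand is not a
   CONST_INT.  */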

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
DEBUG_FUNCTION void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = (int *) data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  Other hard registers cost 2 each; on targets with small register
   classes any hard register reference makes us return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}
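
/* For example, (plus:SI (reg:SI 100) (reg:SI 101)) with two pseudos has
   an estimated register cost of 2; replacing one pseudo with the frame
   pointer drops the estimate to 1, since fixed registers are
   CHEAP_REGNO and contribute nothing.  */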

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
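
/* For example, preferable (4, 1, 4, 2) is negative (equal rtx cost, so
   the lower register cost wins), while preferable (MAX_COST, 0, 6,
   MAX_COST) is positive: an expression whose cost is MAX_COST is
   entirely unwanted regardless of register cost.  */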

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer, int opno)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
					     GET_MODE (SUBREG_REG (x))))
	  ? 0
	  : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
}
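
/* Thus a lowpart SUBREG of a register, e.g. (subreg:QI (reg:SI 100) 0)
   on a little-endian target where the truncation is a no-op, costs 0;
   any other non-register expression costs twice its rtx_cost.  */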


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
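
/* For example, starting from an empty table, a request for 300
   registers grows the size 64 -> 128 -> 256 -> 512; once the table has
   reached 2048 entries it is instead grown to exactly the requested
   count.  */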

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE that was not
   in any register before, and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}


/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
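
/* For example, with targetm.const_anchor == 0x8000 (a value typical of
   targets with 16-bit signed immediates, chosen here purely for
   illustration) and CST == 0x12345: lower_base = 0x10000 with
   lower_offs = 0x2345, and upper_base = 0x18000 with
   upper_offs = -0x5cbb, so *_BASE + *_OFFS = CST holds for both
   anchors.  */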

/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     enum machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression, so it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
}

/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
      return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}
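
/* Continuing the example above: if (reg:SI 100) holds 0x12345, the
   equivalences 0x10000 == (reg:SI 100) + (-0x2345) and
   0x18000 == (reg:SI 100) + 0x5cbb are entered, so either anchor can
   later be synthesized from register 100 with a single addition.  (The
   register number is invented for the example.)  */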

/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}

/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful, or NULL_RTX
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, enum machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
1546
1547/* Insert X in the hash table, assuming HASH is its hash code and
1548   CLASSP is an element of the class it should go in (or 0 if a new
1549   class should be made).  COST is the code of X and reg_cost is the
1550   cost of registers in X.  It is inserted at the proper position to
1551   keep the class in the order cheapest first.
1552
1553   MODE is the machine-mode of X, or if X is an integer constant
1554   with VOIDmode then MODE is the mode with which X will be used.
1555
1556   For elements of equal cheapness, the most recent one
1557   goes in front, except that the first element in the list
1558   remains first unless a cheaper element is added.  The order of
1559   pseudo-registers does not matter, as canon_reg will be called to
1560   find the cheapest when a register is retrieved from the table.
1561
1562   The in_memory field in the hash table element is set to 0.
1563   The caller must set it nonzero if appropriate.
1564
1565   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1566   and if insert_regs returns a nonzero value
1567   you must then recompute its hash code before calling here.
1568
1569   If necessary, update table showing constant values of quantities.  */
1570
1571static struct table_elt *
1572insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1573		   enum machine_mode mode, int cost, int reg_cost)
1574{
1575  struct table_elt *elt;
1576
1577  /* If X is a register and we haven't made a quantity for it,
1578     something is wrong.  */
1579  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1580
1581  /* If X is a hard register, show it is being put in the table.  */
1582  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1583    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1584
1585  /* Put an element for X into the right hash bucket.  */
1586
1587  elt = free_element_chain;
1588  if (elt)
1589    free_element_chain = elt->next_same_hash;
1590  else
1591    elt = XNEW (struct table_elt);
1592
1593  elt->exp = x;
1594  elt->canon_exp = NULL_RTX;
1595  elt->cost = cost;
1596  elt->regcost = reg_cost;
1597  elt->next_same_value = 0;
1598  elt->prev_same_value = 0;
1599  elt->next_same_hash = table[hash];
1600  elt->prev_same_hash = 0;
1601  elt->related_value = 0;
1602  elt->in_memory = 0;
1603  elt->mode = mode;
1604  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1605
1606  if (table[hash])
1607    table[hash]->prev_same_hash = elt;
1608  table[hash] = elt;
1609
1610  /* Put it into the proper value-class.  */
1611  if (classp)
1612    {
1613      classp = classp->first_same_value;
1614      if (CHEAPER (elt, classp))
1615	/* Insert at the head of the class.  */
1616	{
1617	  struct table_elt *p;
1618	  elt->next_same_value = classp;
1619	  classp->prev_same_value = elt;
1620	  elt->first_same_value = elt;
1621
1622	  for (p = classp; p; p = p->next_same_value)
1623	    p->first_same_value = elt;
1624	}
1625      else
1626	{
1627	  /* Insert not at head of the class.  */
1628	  /* Put it after the last element cheaper than X.  */
1629	  struct table_elt *p, *next;
1630
1631	  for (p = classp;
1632	       (next = p->next_same_value) && CHEAPER (next, elt);
1633	       p = next)
1634	    ;
1635
1636	  /* Put it after P and before NEXT.  */
1637	  elt->next_same_value = next;
1638	  if (next)
1639	    next->prev_same_value = elt;
1640
1641	  elt->prev_same_value = p;
1642	  p->next_same_value = elt;
1643	  elt->first_same_value = classp;
1644	}
1645    }
1646  else
1647    elt->first_same_value = elt;
1648
1649  /* If this is a constant being set equivalent to a register or a register
1650     being set equivalent to a constant, note the constant equivalence.
1651
1652     If this is a constant, it cannot be equivalent to a different constant,
1653     and a constant is the only thing that can be cheaper than a register.  So
1654     we know the register is the head of the class (before the constant was
1655     inserted).
1656
1657     If this is a register that is not already known equivalent to a
1658     constant, we must check the entire class.
1659
1660     If this is a register that is already known equivalent to a
1661     constant, update the qty's `const_insn' to show that `this_insn' is
1662     the latest insn making that quantity equivalent to the constant.  */
1663
1664  if (elt->is_const && classp && REG_P (classp->exp)
1665      && !REG_P (x))
1666    {
1667      int exp_q = REG_QTY (REGNO (classp->exp));
1668      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1669
1670      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1671      exp_ent->const_insn = this_insn;
1672    }
1673
1674  else if (REG_P (x)
1675	   && classp
1676	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1677	   && ! elt->is_const)
1678    {
1679      struct table_elt *p;
1680
1681      for (p = classp; p != 0; p = p->next_same_value)
1682	{
1683	  if (p->is_const && !REG_P (p->exp))
1684	    {
1685	      int x_q = REG_QTY (REGNO (x));
1686	      struct qty_table_elem *x_ent = &qty_table[x_q];
1687
1688	      x_ent->const_rtx
1689		= gen_lowpart (GET_MODE (x), p->exp);
1690	      x_ent->const_insn = this_insn;
1691	      break;
1692	    }
1693	}
1694    }
1695
1696  else if (REG_P (x)
1697	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1698	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1699    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1700
1701  /* If this is a constant with symbolic value,
1702     and it has a term with an explicit integer value,
1703     link it up with related expressions.  */
1704  if (GET_CODE (x) == CONST)
1705    {
1706      rtx subexp = get_related_value (x);
1707      unsigned subhash;
1708      struct table_elt *subelt, *subelt_prev;
1709
1710      if (subexp != 0)
1711	{
1712	  /* Get the integer-free subexpression in the hash table.  */
1713	  subhash = SAFE_HASH (subexp, mode);
1714	  subelt = lookup (subexp, subhash, mode);
1715	  if (subelt == 0)
1716	    subelt = insert (subexp, NULL, subhash, mode);
1717	  /* Initialize SUBELT's circular chain if it has none.  */
1718	  if (subelt->related_value == 0)
1719	    subelt->related_value = subelt;
1720	  /* Find the element in the circular chain that precedes SUBELT.  */
1721	  subelt_prev = subelt;
1722	  while (subelt_prev->related_value != subelt)
1723	    subelt_prev = subelt_prev->related_value;
1724	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1725	     This way the element that follows SUBELT is the oldest one.  */
1726	  elt->related_value = subelt_prev->related_value;
1727	  subelt_prev->related_value = elt;
1728	}
1729    }
1730
1731  return elt;
1732}
1733
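/* Worked example of the cheapest-first ordering (hypothetical costs):
   suppose the class for some value currently holds

	(reg:SI 100)   cost 2
	(mem:SI ...)   cost 6

   Inserting (const_int 9) with cost 1 places it at the head and
   redirects every element's first_same_value to the new constant; the
   code above then records the constant in qty_table as the register's
   equivalence.  */
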
1734/* Wrap insert_with_costs by passing the default costs.  */
1735
1736static struct table_elt *
1737insert (rtx x, struct table_elt *classp, unsigned int hash,
1738	enum machine_mode mode)
1739{
1740  return
1741    insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1742}
1743
1744
1745/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1746   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1747   the two classes equivalent.
1748
1749   CLASS1 will be the surviving class; CLASS2 should not be used after this
1750   call.
1751
1752   Any invalid entries in CLASS2 will not be copied.  */
1753
1754static void
1755merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1756{
1757  struct table_elt *elt, *next, *new_elt;
1758
1759  /* Ensure we start with the head of the classes.  */
1760  class1 = class1->first_same_value;
1761  class2 = class2->first_same_value;
1762
1763  /* If they were already equal, forget it.  */
1764  if (class1 == class2)
1765    return;
1766
1767  for (elt = class2; elt; elt = next)
1768    {
1769      unsigned int hash;
1770      rtx exp = elt->exp;
1771      enum machine_mode mode = elt->mode;
1772
1773      next = elt->next_same_value;
1774
1775      /* Remove old entry, make a new one in CLASS1's class.
1776	 Don't do this for invalid entries as we cannot find their
1777	 hash code (it also isn't necessary).  */
1778      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1779	{
1780	  bool need_rehash = false;
1781
1782	  hash_arg_in_memory = 0;
1783	  hash = HASH (exp, mode);
1784
1785	  if (REG_P (exp))
1786	    {
1787	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1788	      delete_reg_equiv (REGNO (exp));
1789	    }
1790
1791	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1792	    remove_pseudo_from_table (exp, hash);
1793	  else
1794	    remove_from_table (elt, hash);
1795
1796	  if (insert_regs (exp, class1, 0) || need_rehash)
1797	    {
1798	      rehash_using_reg (exp);
1799	      hash = HASH (exp, mode);
1800	    }
1801	  new_elt = insert (exp, class1, hash, mode);
1802	  new_elt->in_memory = hash_arg_in_memory;
1803	}
1804    }
1805}
1806
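/* Sketch of when merging happens (hypothetical insn): on reaching
   (set (reg:SI 100) (reg:SI 200)) while both registers head distinct
   classes, the caller merges so that every expression formerly
   equivalent to (reg:SI 200) is re-hashed into the class of
   (reg:SI 100).  */
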
1807/* Flush the entire hash table.  */
1808
1809static void
1810flush_hash_table (void)
1811{
1812  int i;
1813  struct table_elt *p;
1814
1815  for (i = 0; i < HASH_SIZE; i++)
1816    for (p = table[i]; p; p = table[i])
1817      {
1818	/* Note that invalidate can remove elements
1819	   after P in the current hash chain.  */
1820	if (REG_P (p->exp))
1821	  invalidate (p->exp, VOIDmode);
1822	else
1823	  remove_from_table (p, i);
1824      }
1825}
1826
1827/* Function called for each rtx to check whether an anti-dependence exists.  */
1828struct check_dependence_data
1829{
1830  enum machine_mode mode;
1831  rtx exp;
1832  rtx addr;
1833};
1834
1835static int
1836check_dependence (rtx *x, void *data)
1837{
1838  struct check_dependence_data *d = (struct check_dependence_data *) data;
1839  if (*x && MEM_P (*x))
1840    return canon_anti_dependence (*x, true, d->exp, d->mode, d->addr);
1841  else
1842    return 0;
1843}
1844
1845/* Remove from the hash table, or mark as invalid, all expressions whose
1846   values could be altered by storing in X.  X is a register, a subreg, or
1847   a memory reference with nonvarying address (because, when a memory
1848   reference with a varying address is stored in, all memory references are
1849   removed by invalidate_memory so specific invalidation is superfluous).
1850   FULL_MODE, if not VOIDmode, indicates that this much should be
1851   invalidated instead of just the amount indicated by the mode of X.  This
1852   is only used for bitfield stores into memory.
1853
1854   A nonvarying address may be just a register or just a symbol reference,
1855   or it may be either of those plus a numeric offset.  */
1856
1857static void
1858invalidate (rtx x, enum machine_mode full_mode)
1859{
1860  int i;
1861  struct table_elt *p;
1862  rtx addr;
1863
1864  switch (GET_CODE (x))
1865    {
1866    case REG:
1867      {
1868	/* If X is a register, dependencies on its contents are recorded
1869	   through the qty number mechanism.  Just change the qty number of
1870	   the register, mark it as invalid for expressions that refer to it,
1871	   and remove it itself.  */
1872	unsigned int regno = REGNO (x);
1873	unsigned int hash = HASH (x, GET_MODE (x));
1874
1875	/* Remove REGNO from any quantity list it might be on and indicate
1876	   that its value might have changed.  If it is a pseudo, remove its
1877	   entry from the hash table.
1878
1879	   For a hard register, we do the first two actions above for any
1880	   additional hard registers corresponding to X.  Then, if any of these
1881	   registers are in the table, we must remove any REG entries that
1882	   overlap these registers.  */
1883
1884	delete_reg_equiv (regno);
1885	REG_TICK (regno)++;
1886	SUBREG_TICKED (regno) = -1;
1887
1888	if (regno >= FIRST_PSEUDO_REGISTER)
1889	  remove_pseudo_from_table (x, hash);
1890	else
1891	  {
1892	    HOST_WIDE_INT in_table
1893	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1894	    unsigned int endregno = END_HARD_REGNO (x);
1895	    unsigned int tregno, tendregno, rn;
1896	    struct table_elt *p, *next;
1897
1898	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1899
1900	    for (rn = regno + 1; rn < endregno; rn++)
1901	      {
1902		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1903		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1904		delete_reg_equiv (rn);
1905		REG_TICK (rn)++;
1906		SUBREG_TICKED (rn) = -1;
1907	      }
1908
1909	    if (in_table)
1910	      for (hash = 0; hash < HASH_SIZE; hash++)
1911		for (p = table[hash]; p; p = next)
1912		  {
1913		    next = p->next_same_hash;
1914
1915		    if (!REG_P (p->exp)
1916			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1917		      continue;
1918
1919		    tregno = REGNO (p->exp);
1920		    tendregno = END_HARD_REGNO (p->exp);
1921		    if (tendregno > regno && tregno < endregno)
1922		      remove_from_table (p, hash);
1923		  }
1924	  }
1925      }
1926      return;
1927
1928    case SUBREG:
1929      invalidate (SUBREG_REG (x), VOIDmode);
1930      return;
1931
1932    case PARALLEL:
1933      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1934	invalidate (XVECEXP (x, 0, i), VOIDmode);
1935      return;
1936
1937    case EXPR_LIST:
1938      /* This is part of a disjoint return value; extract the location in
1939	 question ignoring the offset.  */
1940      invalidate (XEXP (x, 0), VOIDmode);
1941      return;
1942
1943    case MEM:
1944      addr = canon_rtx (get_addr (XEXP (x, 0)));
1945      /* Calculate the canonical version of X here so that
1946	 true_dependence doesn't generate new RTL for X on each call.  */
1947      x = canon_rtx (x);
1948
1949      /* Remove all hash table elements that refer to overlapping pieces of
1950	 memory.  */
1951      if (full_mode == VOIDmode)
1952	full_mode = GET_MODE (x);
1953
1954      for (i = 0; i < HASH_SIZE; i++)
1955	{
1956	  struct table_elt *next;
1957
1958	  for (p = table[i]; p; p = next)
1959	    {
1960	      next = p->next_same_hash;
1961	      if (p->in_memory)
1962		{
1963		  struct check_dependence_data d;
1964
1965		  /* Just canonicalize the expression once;
1966		     otherwise each time we call invalidate
1967		     true_dependence will canonicalize the
1968		     expression again.  */
1969		  if (!p->canon_exp)
1970		    p->canon_exp = canon_rtx (p->exp);
1971		  d.exp = x;
1972		  d.addr = addr;
1973		  d.mode = full_mode;
1974		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1975		    remove_from_table (p, i);
1976		}
1977	    }
1978	}
1979      return;
1980
1981    default:
1982      gcc_unreachable ();
1983    }
1984}
1985
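/* Illustrative effect (hypothetical entries): after a store to
   (reg:SI 1), an entry such as (plus:SI (reg:SI 1) (const_int 4)) is
   stale.  The REG case above does not walk such entries; incrementing
   REG_TICK (1) is what marks them invalid, and later lookups reject
   them through exp_equiv_p's validate check.  */
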
1986/* Remove all expressions that refer to register REGNO,
1987   since they are already invalid, and we are about to
1988   mark that register valid again and don't want the old
1989   expressions to reappear as valid.  */
1990
1991static void
1992remove_invalid_refs (unsigned int regno)
1993{
1994  unsigned int i;
1995  struct table_elt *p, *next;
1996
1997  for (i = 0; i < HASH_SIZE; i++)
1998    for (p = table[i]; p; p = next)
1999      {
2000	next = p->next_same_hash;
2001	if (!REG_P (p->exp)
2002	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2003	  remove_from_table (p, i);
2004      }
2005}
2006
2007/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2008   and mode MODE.  */
2009static void
2010remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
2011			    enum machine_mode mode)
2012{
2013  unsigned int i;
2014  struct table_elt *p, *next;
2015  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2016
2017  for (i = 0; i < HASH_SIZE; i++)
2018    for (p = table[i]; p; p = next)
2019      {
2020	rtx exp = p->exp;
2021	next = p->next_same_hash;
2022
2023	if (!REG_P (exp)
2024	    && (GET_CODE (exp) != SUBREG
2025		|| !REG_P (SUBREG_REG (exp))
2026		|| REGNO (SUBREG_REG (exp)) != regno
2027		|| (((SUBREG_BYTE (exp)
2028		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2029		    && SUBREG_BYTE (exp) <= end))
2030	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2031	  remove_from_table (p, i);
2032      }
2033}
2034
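/* Worked overlap check (hypothetical numbers): with REGNO 100, OFFSET 4
   and MODE SImode, END is 4 + (4 - 1) = 7.  An entry
   (subreg:SI (reg:DI 100) 0) covers bytes 0..3; 0 + 3 >= 4 fails, so it
   does not overlap and survives.  (subreg:SI (reg:DI 100) 4) covers
   bytes 4..7 and is removed, as is any non-SUBREG expression that
   mentions register 100.  */
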
2035/* Recompute the hash codes of any valid entries in the hash table that
2036   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2037
2038   This is called when we make a jump equivalence.  */
2039
2040static void
2041rehash_using_reg (rtx x)
2042{
2043  unsigned int i;
2044  struct table_elt *p, *next;
2045  unsigned hash;
2046
2047  if (GET_CODE (x) == SUBREG)
2048    x = SUBREG_REG (x);
2049
2050  /* If X is not a register or if the register is known not to be in any
2051     valid entries in the table, we have no work to do.  */
2052
2053  if (!REG_P (x)
2054      || REG_IN_TABLE (REGNO (x)) < 0
2055      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2056    return;
2057
2058  /* Scan all hash chains looking for valid entries that mention X.
2059     If we find one and it is in the wrong hash chain, move it.  */
2060
2061  for (i = 0; i < HASH_SIZE; i++)
2062    for (p = table[i]; p; p = next)
2063      {
2064	next = p->next_same_hash;
2065	if (reg_mentioned_p (x, p->exp)
2066	    && exp_equiv_p (p->exp, p->exp, 1, false)
2067	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2068	  {
2069	    if (p->next_same_hash)
2070	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2071
2072	    if (p->prev_same_hash)
2073	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2074	    else
2075	      table[i] = p->next_same_hash;
2076
2077	    p->next_same_hash = table[hash];
2078	    p->prev_same_hash = 0;
2079	    if (table[hash])
2080	      table[hash]->prev_same_hash = p;
2081	    table[hash] = p;
2082	  }
2083      }
2084}
2085
2086/* Remove from the hash table any expressions that are call-clobbered
2087   registers.  Also update their TICK values.  */
2088
2089static void
2090invalidate_for_call (void)
2091{
2092  unsigned int regno, endregno;
2093  unsigned int i;
2094  unsigned hash;
2095  struct table_elt *p, *next;
2096  int in_table = 0;
2097  hard_reg_set_iterator hrsi;
2098
2099  /* Go through all the hard registers.  For each that is clobbered in
2100     a CALL_INSN, remove the register from quantity chains and update
2101     reg_tick if defined.  Also see if any of these registers is currently
2102     in the table.  */
2103  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
2104    {
2105      delete_reg_equiv (regno);
2106      if (REG_TICK (regno) >= 0)
2107	{
2108	  REG_TICK (regno)++;
2109	  SUBREG_TICKED (regno) = -1;
2110	}
2111      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2112    }
2113
2114  /* In the case where we have no call-clobbered hard registers in the
2115     table, we are done.  Otherwise, scan the table and remove any
2116     entry that overlaps a call-clobbered register.  */
2117
2118  if (in_table)
2119    for (hash = 0; hash < HASH_SIZE; hash++)
2120      for (p = table[hash]; p; p = next)
2121	{
2122	  next = p->next_same_hash;
2123
2124	  if (!REG_P (p->exp)
2125	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2126	    continue;
2127
2128	  regno = REGNO (p->exp);
2129	  endregno = END_HARD_REGNO (p->exp);
2130
2131	  for (i = regno; i < endregno; i++)
2132	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2133	      {
2134		remove_from_table (p, hash);
2135		break;
2136	      }
2137	}
2138}
2139
2140/* Given an expression X of type CONST,
2141   and ELT which is its table entry (or 0 if it
2142   is not in the hash table),
2143   return an alternate expression for X as a register plus integer.
2144   If none can be found, return 0.  */
2145
2146static rtx
2147use_related_value (rtx x, struct table_elt *elt)
2148{
2149  struct table_elt *relt = 0;
2150  struct table_elt *p, *q;
2151  HOST_WIDE_INT offset;
2152
2153  /* First, is there anything related known?
2154     If we have a table element, we can tell from that.
2155     Otherwise, must look it up.  */
2156
2157  if (elt != 0 && elt->related_value != 0)
2158    relt = elt;
2159  else if (elt == 0 && GET_CODE (x) == CONST)
2160    {
2161      rtx subexp = get_related_value (x);
2162      if (subexp != 0)
2163	relt = lookup (subexp,
2164		       SAFE_HASH (subexp, GET_MODE (subexp)),
2165		       GET_MODE (subexp));
2166    }
2167
2168  if (relt == 0)
2169    return 0;
2170
2171  /* Search all related table entries for one that has an
2172     equivalent register.  */
2173
2174  p = relt;
2175  while (1)
2176    {
2177      /* This loop is strange in that it is executed in two different cases.
2178	 The first is when X is already in the table.  Then it is searching
2179	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2180	 X is not in the table.  Then RELT points to a class for the related
2181	 value.
2182
2183	 Ensure that, whatever case we are in, we ignore classes that have
2184	 the same value as X.  */
2185
2186      if (rtx_equal_p (x, p->exp))
2187	q = 0;
2188      else
2189	for (q = p->first_same_value; q; q = q->next_same_value)
2190	  if (REG_P (q->exp))
2191	    break;
2192
2193      if (q)
2194	break;
2195
2196      p = p->related_value;
2197
2198      /* We went all the way around, so there is nothing to be found.
2199	 Alternatively, perhaps RELT was in the table for some other reason
2200	 and it has no related values recorded.  */
2201      if (p == relt || p == 0)
2202	break;
2203    }
2204
2205  if (q == 0)
2206    return 0;
2207
2208  offset = (get_integer_term (x) - get_integer_term (p->exp));
2209  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2210  return plus_constant (q->mode, q->exp, offset);
2211}
2212
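/* Illustrative use (hypothetical RTL): if X is
   (const (plus (symbol_ref "table") (const_int 16))) and the hash table
   already holds (const (plus (symbol_ref "table") (const_int 4))) in a
   class containing (reg:SI 100), the related-value chain built by
   insert_with_costs lets us return

	(plus:SI (reg:SI 100) (const_int 12))

   instead of rematerializing the symbolic constant.  */
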
2213
2214/* Hash a string.  Just add its bytes up.  */
2215static inline unsigned
2216hash_rtx_string (const char *ps)
2217{
2218  unsigned hash = 0;
2219  const unsigned char *p = (const unsigned char *) ps;
2220
2221  if (p)
2222    while (*p)
2223      hash += *p++;
2224
2225  return hash;
2226}
2227
2228/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2229   When the callback returns true, we continue with the new rtx.  */
2230
2231unsigned
2232hash_rtx_cb (const_rtx x, enum machine_mode mode,
2233             int *do_not_record_p, int *hash_arg_in_memory_p,
2234             bool have_reg_qty, hash_rtx_callback_function cb)
2235{
2236  int i, j;
2237  unsigned hash = 0;
2238  enum rtx_code code;
2239  const char *fmt;
2240  enum machine_mode newmode;
2241  rtx newx;
2242
2243  /* Used to turn recursion into iteration.  We can't rely on GCC's
2244     tail-recursion elimination since we need to keep accumulating values
2245     in HASH.  */
2246 repeat:
2247  if (x == 0)
2248    return hash;
2249
2250  /* Invoke the callback first.  */
2251  if (cb != NULL
2252      && ((*cb) (x, mode, &newx, &newmode)))
2253    {
2254      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2255                           hash_arg_in_memory_p, have_reg_qty, cb);
2256      return hash;
2257    }
2258
2259  code = GET_CODE (x);
2260  switch (code)
2261    {
2262    case REG:
2263      {
2264	unsigned int regno = REGNO (x);
2265
2266	if (do_not_record_p && !reload_completed)
2267	  {
2268	    /* On some machines, we can't record any non-fixed hard register,
2269	       because extending its life will cause reload problems.  We
2270	       consider ap, fp, sp, gp to be fixed for this purpose.
2271
2272	       We also consider CCmode registers to be fixed for this purpose;
2273	       failure to do so leads to failure to simplify 0<100 type of
2274	       conditionals.
2275
2276	       On all machines, we can't record any global registers.
2277	       Nor should we record any register that is in a small
2278	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
2279	    bool record;
2280
2281	    if (regno >= FIRST_PSEUDO_REGISTER)
2282	      record = true;
2283	    else if (x == frame_pointer_rtx
2284		     || x == hard_frame_pointer_rtx
2285		     || x == arg_pointer_rtx
2286		     || x == stack_pointer_rtx
2287		     || x == pic_offset_table_rtx)
2288	      record = true;
2289	    else if (global_regs[regno])
2290	      record = false;
2291	    else if (fixed_regs[regno])
2292	      record = true;
2293	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2294	      record = true;
2295	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2296	      record = false;
2297	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2298	      record = false;
2299	    else
2300	      record = true;
2301
2302	    if (!record)
2303	      {
2304		*do_not_record_p = 1;
2305		return 0;
2306	      }
2307	  }
2308
2309	hash += ((unsigned int) REG << 7);
2310        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2311	return hash;
2312      }
2313
2314    /* We handle SUBREG of a REG specially because the underlying
2315       reg changes its hash value with every value change; we don't
2316       want to have to forget unrelated subregs when one subreg changes.  */
2317    case SUBREG:
2318      {
2319	if (REG_P (SUBREG_REG (x)))
2320	  {
2321	    hash += (((unsigned int) SUBREG << 7)
2322		     + REGNO (SUBREG_REG (x))
2323		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2324	    return hash;
2325	  }
2326	break;
2327      }
2328
2329    case CONST_INT:
2330      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2331               + (unsigned int) INTVAL (x));
2332      return hash;
2333
2334    case CONST_DOUBLE:
2335      /* This is like the general case, except that it only counts
2336	 the integers representing the constant.  */
2337      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2338      if (GET_MODE (x) != VOIDmode)
2339	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2340      else
2341	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2342		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2343      return hash;
2344
2345    case CONST_FIXED:
2346      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2347      hash += fixed_hash (CONST_FIXED_VALUE (x));
2348      return hash;
2349
2350    case CONST_VECTOR:
2351      {
2352	int units;
2353	rtx elt;
2354
2355	units = CONST_VECTOR_NUNITS (x);
2356
2357	for (i = 0; i < units; ++i)
2358	  {
2359	    elt = CONST_VECTOR_ELT (x, i);
2360	    hash += hash_rtx_cb (elt, GET_MODE (elt),
2361                                 do_not_record_p, hash_arg_in_memory_p,
2362                                 have_reg_qty, cb);
2363	  }
2364
2365	return hash;
2366      }
2367
2368      /* Assume there is only one rtx object for any given label.  */
2369    case LABEL_REF:
2370      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2371	 differences and differences between each stage's debugging dumps.  */
2372	 hash += (((unsigned int) LABEL_REF << 7)
2373      hash += (((unsigned int) LABEL_REF << 7)
2374	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
2375
2376    case SYMBOL_REF:
2377      {
2378	/* Don't hash on the symbol's address to avoid bootstrap differences.
2379	   Different hash values may cause expressions to be recorded in
2380	   different orders and thus different registers to be used in the
2381	   final assembler.  This also avoids differences in the dump files
2382	   between various stages.  */
2383	unsigned int h = 0;
2384	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2385
2386	while (*p)
2387	  h += (h << 7) + *p++; /* ??? revisit */
2388
2389	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2390	return hash;
2391      }
2392
2393    case MEM:
2394      /* We don't record if marked volatile or if BLKmode since we don't
2395	 know the size of the move.  */
2396      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2397	{
2398	  *do_not_record_p = 1;
2399	  return 0;
2400	}
2401      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2402	*hash_arg_in_memory_p = 1;
2403
2404      /* Now that we have already found this special case,
2405	 might as well speed it up as much as possible.  */
2406      hash += (unsigned) MEM;
2407      x = XEXP (x, 0);
2408      goto repeat;
2409
2410    case USE:
2411      /* A USE that mentions non-volatile memory needs special
2412	 handling since the MEM may be BLKmode which normally
2413	 prevents an entry from being made.  Pure calls are
2414	 marked by a USE which mentions BLKmode memory.
2415	 See calls.c:emit_call_1.  */
2416      if (MEM_P (XEXP (x, 0))
2417	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2418	{
2419	  hash += (unsigned) USE;
2420	  x = XEXP (x, 0);
2421
2422	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2423	    *hash_arg_in_memory_p = 1;
2424
2425	  /* Now that we have already found this special case,
2426	     might as well speed it up as much as possible.  */
2427	  hash += (unsigned) MEM;
2428	  x = XEXP (x, 0);
2429	  goto repeat;
2430	}
2431      break;
2432
2433    case PRE_DEC:
2434    case PRE_INC:
2435    case POST_DEC:
2436    case POST_INC:
2437    case PRE_MODIFY:
2438    case POST_MODIFY:
2439    case PC:
2440    case CC0:
2441    case CALL:
2442    case UNSPEC_VOLATILE:
2443      if (do_not_record_p)
2444	{
2445	  *do_not_record_p = 1;
2446	  return 0;
2447	}
2448      else
2449	return hash;
2450
2451    case ASM_OPERANDS:
2452      if (do_not_record_p && MEM_VOLATILE_P (x))
2453	{
2454	  *do_not_record_p = 1;
2455	  return 0;
2456	}
2457      else
2458	{
2459	  /* We don't want to take the filename and line into account.  */
2460	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2461	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2462	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2463	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2464
2465	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2466	    {
2467	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2468		{
2469		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2470                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2471                                        do_not_record_p, hash_arg_in_memory_p,
2472                                        have_reg_qty, cb)
2473			   + hash_rtx_string
2474                           (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2475		}
2476
2477	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2478	      x = ASM_OPERANDS_INPUT (x, 0);
2479	      mode = GET_MODE (x);
2480	      goto repeat;
2481	    }
2482
2483	  return hash;
2484	}
2485      break;
2486
2487    default:
2488      break;
2489    }
2490
2491  i = GET_RTX_LENGTH (code) - 1;
2492  hash += (unsigned) code + (unsigned) GET_MODE (x);
2493  fmt = GET_RTX_FORMAT (code);
2494  for (; i >= 0; i--)
2495    {
2496      switch (fmt[i])
2497	{
2498	case 'e':
2499	  /* If we are about to do the last recursive call
2500	     needed at this level, change it into iteration.
2501	     This function is called often enough to make this worthwhile.  */
2502	  if (i == 0)
2503	    {
2504	      x = XEXP (x, i);
2505	      goto repeat;
2506	    }
2507
2508	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2509                               hash_arg_in_memory_p,
2510                               have_reg_qty, cb);
2511	  break;
2512
2513	case 'E':
2514	  for (j = 0; j < XVECLEN (x, i); j++)
2515	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2516                                 hash_arg_in_memory_p,
2517                                 have_reg_qty, cb);
2518	  break;
2519
2520	case 's':
2521	  hash += hash_rtx_string (XSTR (x, i));
2522	  break;
2523
2524	case 'i':
2525	  hash += (unsigned int) XINT (x, i);
2526	  break;
2527
2528	case '0': case 't':
2529	  /* Unused.  */
2530	  break;
2531
2532	default:
2533	  gcc_unreachable ();
2534	}
2535    }
2536
2537  return hash;
2538}
2539
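/* Hedged sketch of the REG case above (hypothetical quantity numbers):
   when HAVE_REG_QTY, two pseudos currently recorded in the same
   quantity, say 7, hash identically:

	hash (reg 100) = ((unsigned int) REG << 7) + 7 = hash (reg 101)

   so (plus (reg 100) x) lands in the same bucket as an existing
   (plus (reg 101) x), which is what lets a lookup discover the
   equivalence.  */
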
2540/* Hash an rtx.  We are careful to make sure the value is never negative.
2541   Equivalent registers hash identically.
2542   MODE is used in hashing for CONST_INTs only;
2543   otherwise the mode of X is used.
2544
2545   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2546
2547   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2548   a MEM rtx which does not have the MEM_READONLY_P flag set.
2549
2550   Note that cse_insn knows that the hash code of a MEM expression
2551   is just (int) MEM plus the hash code of the address.  */
2552
2553unsigned
2554hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
2555	  int *hash_arg_in_memory_p, bool have_reg_qty)
2556{
2557  return hash_rtx_cb (x, mode, do_not_record_p,
2558                      hash_arg_in_memory_p, have_reg_qty, NULL);
2559}
2560
2561/* Hash an rtx X for cse via hash_rtx.
2562   Stores 1 in do_not_record if any subexpression is volatile.
2563   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2564   does not have the MEM_READONLY_P flag set.  */
2565
2566static inline unsigned
2567canon_hash (rtx x, enum machine_mode mode)
2568{
2569  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2570}
2571
2572/* Like canon_hash but with no side effects, i.e. do_not_record
2573   and hash_arg_in_memory are not changed.  */
2574
2575static inline unsigned
2576safe_hash (rtx x, enum machine_mode mode)
2577{
2578  int dummy_do_not_record;
2579  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2580}
2581
2582/* Return 1 iff X and Y would canonicalize into the same thing,
2583   without actually constructing the canonicalization of either one.
2584   If VALIDATE is nonzero,
2585   we assume X is an expression being processed from the rtl
2586   and Y was found in the hash table.  We check register refs
2587   in Y for being marked as valid.
2588
2589   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2590
2591int
2592exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2593{
2594  int i, j;
2595  enum rtx_code code;
2596  const char *fmt;
2597
2598  /* Note: it is incorrect to assume an expression is equivalent to itself
2599     if VALIDATE is nonzero.  */
2600  if (x == y && !validate)
2601    return 1;
2602
2603  if (x == 0 || y == 0)
2604    return x == y;
2605
2606  code = GET_CODE (x);
2607  if (code != GET_CODE (y))
2608    return 0;
2609
2610  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2611  if (GET_MODE (x) != GET_MODE (y))
2612    return 0;
2613
2614  /* MEMs referring to different address spaces are not equivalent.  */
2615  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2616    return 0;
2617
2618  switch (code)
2619    {
2620    case PC:
2621    case CC0:
2622    CASE_CONST_UNIQUE:
2623      return x == y;
2624
2625    case LABEL_REF:
2626      return XEXP (x, 0) == XEXP (y, 0);
2627
2628    case SYMBOL_REF:
2629      return XSTR (x, 0) == XSTR (y, 0);
2630
2631    case REG:
2632      if (for_gcse)
2633	return REGNO (x) == REGNO (y);
2634      else
2635	{
2636	  unsigned int regno = REGNO (y);
2637	  unsigned int i;
2638	  unsigned int endregno = END_REGNO (y);
2639
2640	  /* If the quantities are not the same, the expressions are not
2641	     equivalent.  If they are the same and we are not to validate,
2642	     they are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2643
2644	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2645	    return 0;
2646
2647	  if (! validate)
2648	    return 1;
2649
2650	  for (i = regno; i < endregno; i++)
2651	    if (REG_IN_TABLE (i) != REG_TICK (i))
2652	      return 0;
2653
2654	  return 1;
2655	}
2656
2657    case MEM:
2658      if (for_gcse)
2659	{
2660	  /* A volatile mem should not be considered equivalent to any
2661	     other.  */
2662	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2663	    return 0;
2664
2665	  /* Can't merge two expressions in different alias sets, since we
2666	     can decide that the expression is transparent in a block when
2667	     it isn't, due to it being set with a different alias set.
2668
2669	     Also, can't merge two expressions with different MEM_ATTRS.
2670	     They could e.g. be two different entities allocated into the
2671	     same space on the stack (see e.g. PR25130).  In that case, the
2672	     MEM addresses can be the same, even though the two MEMs are
2673	     absolutely not equivalent.
2674
2675	     But because really all MEM attributes should be the same for
2676	     equivalent MEMs, we just use the invariant that MEMs that have
2677	     the same attributes share the same mem_attrs data structure.  */
2678	  if (MEM_ATTRS (x) != MEM_ATTRS (y))
2679	    return 0;
2680	}
2681      break;
2682
2683    /*  For commutative operations, check both orders.  */
2684    case PLUS:
2685    case MULT:
2686    case AND:
2687    case IOR:
2688    case XOR:
2689    case NE:
2690    case EQ:
2691      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2692			     validate, for_gcse)
2693	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2694				validate, for_gcse))
2695	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2696				validate, for_gcse)
2697		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2698				   validate, for_gcse)));
2699
2700    case ASM_OPERANDS:
2701      /* We don't use the generic code below because we want to
2702	 disregard filename and line numbers.  */
2703
2704      /* A volatile asm isn't equivalent to any other.  */
2705      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2706	return 0;
2707
2708      if (GET_MODE (x) != GET_MODE (y)
2709	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2710	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2711		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2712	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2713	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2714	return 0;
2715
2716      if (ASM_OPERANDS_INPUT_LENGTH (x))
2717	{
2718	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2719	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2720			       ASM_OPERANDS_INPUT (y, i),
2721			       validate, for_gcse)
2722		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2723			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2724	      return 0;
2725	}
2726
2727      return 1;
2728
2729    default:
2730      break;
2731    }
2732
2733  /* Compare the elements.  If any pair of corresponding elements
2734     fail to match, return 0 for the whole thing.  */
2735
2736  fmt = GET_RTX_FORMAT (code);
2737  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2738    {
2739      switch (fmt[i])
2740	{
2741	case 'e':
2742	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2743			      validate, for_gcse))
2744	    return 0;
2745	  break;
2746
2747	case 'E':
2748	  if (XVECLEN (x, i) != XVECLEN (y, i))
2749	    return 0;
2750	  for (j = 0; j < XVECLEN (x, i); j++)
2751	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2752				validate, for_gcse))
2753	      return 0;
2754	  break;
2755
2756	case 's':
2757	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2758	    return 0;
2759	  break;
2760
2761	case 'i':
2762	  if (XINT (x, i) != XINT (y, i))
2763	    return 0;
2764	  break;
2765
2766	case 'w':
2767	  if (XWINT (x, i) != XWINT (y, i))
2768	    return 0;
2769	  break;
2770
2771	case '0':
2772	case 't':
2773	  break;
2774
2775	default:
2776	  gcc_unreachable ();
2777	}
2778    }
2779
2780  return 1;
2781}
2782
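/* Example of the commutative case (illustrative only): exp_equiv_p on
   (plus:SI (reg:SI 100) (const_int 1)) and
   (plus:SI (const_int 1) (reg:SI 100)) succeeds through the second
   order tried above, while the early mode test still rejects, e.g.,
   (mult:SI x y) against (mult:HI x y).  */
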
2783/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2784   the result if necessary.  INSN is as for canon_reg.  */
2785
2786static void
2787validate_canon_reg (rtx *xloc, rtx insn)
2788{
2789  if (*xloc)
2790    {
2791      rtx new_rtx = canon_reg (*xloc, insn);
2792
2793      /* If replacing pseudo with hard reg or vice versa, ensure the
2794         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2795      gcc_assert (insn && new_rtx);
2796      validate_change (insn, xloc, new_rtx, 1);
2797    }
2798}
2799
2800/* Canonicalize an expression:
2801   replace each register reference inside it
2802   with the "oldest" equivalent register.
2803
2804   If INSN is nonzero validate_change is used to ensure that INSN remains valid
2805   after we make our substitution.  The calls are made with IN_GROUP nonzero
2806   so apply_change_group must be called upon the outermost return from this
2807   function (unless INSN is zero).  The result of apply_change_group can
2808   generally be discarded since the changes we are making are optional.  */
2809
2810static rtx
2811canon_reg (rtx x, rtx insn)
2812{
2813  int i;
2814  enum rtx_code code;
2815  const char *fmt;
2816
2817  if (x == 0)
2818    return x;
2819
2820  code = GET_CODE (x);
2821  switch (code)
2822    {
2823    case PC:
2824    case CC0:
2825    case CONST:
2826    CASE_CONST_ANY:
2827    case SYMBOL_REF:
2828    case LABEL_REF:
2829    case ADDR_VEC:
2830    case ADDR_DIFF_VEC:
2831      return x;
2832
2833    case REG:
2834      {
2835	int first;
2836	int q;
2837	struct qty_table_elem *ent;
2838
2839	/* Never replace a hard reg, because hard regs can appear
2840	   in more than one machine mode, and we must preserve the mode
2841	   of each occurrence.  Also, some hard regs appear in
2842	   MEMs that are shared and mustn't be altered.  Don't try to
2843	   replace any reg that maps to a reg of class NO_REGS.  */
2844	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2845	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2846	  return x;
2847
2848	q = REG_QTY (REGNO (x));
2849	ent = &qty_table[q];
2850	first = ent->first_reg;
2851	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2852		: REGNO_REG_CLASS (first) == NO_REGS ? x
2853		: gen_rtx_REG (ent->mode, first));
2854      }
2855
2856    default:
2857      break;
2858    }
2859
2860  fmt = GET_RTX_FORMAT (code);
2861  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2862    {
2863      int j;
2864
2865      if (fmt[i] == 'e')
2866	validate_canon_reg (&XEXP (x, i), insn);
2867      else if (fmt[i] == 'E')
2868	for (j = 0; j < XVECLEN (x, i); j++)
2869	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2870    }
2871
2872  return x;
2873}
2874
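/* Canonicalization example (hypothetical pseudo numbers): if pseudos
   105 and 112 currently share a quantity whose first_reg is 105, then
   canon_reg rewrites (plus:SI (reg:SI 112) (const_int 8)) into
   (plus:SI (reg:SI 105) (const_int 8)), queuing the change through
   validate_canon_reg so that apply_change_group can veto it if the
   insn would no longer match.  */
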
2875/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2876   operation (EQ, NE, GT, etc.), follow it back through the hash table to
2877   find what values are actually being compared.
2878
2879   *PARG1 and *PARG2 are updated to contain the rtx representing the values
2880   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
2881   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2882   compared to produce cc0.
2883
2884   The return value is the comparison operator: either CODE itself or
2885   the code corresponding to the inverse of the comparison.  */
2886
2887static enum rtx_code
2888find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2889		      enum machine_mode *pmode1, enum machine_mode *pmode2)
2890{
2891  rtx arg1, arg2;
2892  struct pointer_set_t *visited = NULL;
2893  /* Set nonzero when we find something of interest.  */
2894  rtx x = NULL;
2895
2896  arg1 = *parg1, arg2 = *parg2;
2897
2898  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
2899
2900  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2901    {
2902      int reverse_code = 0;
2903      struct table_elt *p = 0;
2904
2905      /* Remember state from previous iteration.  */
2906      if (x)
2907	{
2908	  if (!visited)
2909	    visited = pointer_set_create ();
2910	  pointer_set_insert (visited, x);
2911	  x = 0;
2912	}
2913
2914      /* If arg1 is a COMPARE, extract the comparison arguments from it.
2915	 On machines with CC0, this is the only case that can occur, since
2916	 fold_rtx will return the COMPARE or item being compared with zero
2917	 when given CC0.  */
2918
2919      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2920	x = arg1;
2921
2922      /* If ARG1 is a comparison operator and CODE is testing for
2923	 STORE_FLAG_VALUE, get the inner arguments.  */
2924
2925      else if (COMPARISON_P (arg1))
2926	{
2927#ifdef FLOAT_STORE_FLAG_VALUE
2928	  REAL_VALUE_TYPE fsfv;
2929#endif
2930
2931	  if (code == NE
2932	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2933		  && code == LT && STORE_FLAG_VALUE == -1)
2934#ifdef FLOAT_STORE_FLAG_VALUE
2935	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2936		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2937		      REAL_VALUE_NEGATIVE (fsfv)))
2938#endif
2939	      )
2940	    x = arg1;
2941	  else if (code == EQ
2942		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2943		       && code == GE && STORE_FLAG_VALUE == -1)
2944#ifdef FLOAT_STORE_FLAG_VALUE
2945		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2946		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2947			   REAL_VALUE_NEGATIVE (fsfv)))
2948#endif
2949		   )
2950	    x = arg1, reverse_code = 1;
2951	}
2952
2953      /* ??? We could also check for
2954
2955	 (ne (and (eq (...) (const_int 1))) (const_int 0))
2956
2957	 and related forms, but let's wait until we see them occurring.  */
2958
2959      if (x == 0)
2960	/* Look up ARG1 in the hash table and see if it has an equivalence
2961	   that lets us see what is being compared.  */
2962	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
2963      if (p)
2964	{
2965	  p = p->first_same_value;
2966
2967	  /* If what we compare is already known to be constant, that is as
2968	     good as it gets.
2969	     We need to break the loop in this case, because otherwise we
2970	     can have an infinite loop when looking at a reg that is known
2971	     to be a constant which is the same as a comparison of a reg
2972	     against zero which appears later in the insn stream, which in
2973	     turn is constant and the same as the comparison of the first reg
2974	     against zero...  */
2975	  if (p->is_const)
2976	    break;
2977	}
2978
2979      for (; p; p = p->next_same_value)
2980	{
2981	  enum machine_mode inner_mode = GET_MODE (p->exp);
2982#ifdef FLOAT_STORE_FLAG_VALUE
2983	  REAL_VALUE_TYPE fsfv;
2984#endif
2985
2986	  /* If the entry isn't valid, skip it.  */
2987	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
2988	    continue;
2989
2990	  /* If it's a comparison we've used before, skip it.  */
2991	  if (visited && pointer_set_contains (visited, p->exp))
2992	    continue;
2993
2994	  if (GET_CODE (p->exp) == COMPARE
2995	      /* Another possibility is that this machine has a compare insn
2996		 that includes the comparison code.  In that case, ARG1 would
2997		 be equivalent to a comparison operation that would set ARG1 to
2998		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
2999		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3000		 we must reverse ORIG_CODE.  On machine with a negative value
3001		 we must reverse ORIG_CODE.  On machines with a negative value
3002	      || ((code == NE
3003		   || (code == LT
3004		       && val_signbit_known_set_p (inner_mode,
3005						   STORE_FLAG_VALUE))
3006#ifdef FLOAT_STORE_FLAG_VALUE
3007		   || (code == LT
3008		       && SCALAR_FLOAT_MODE_P (inner_mode)
3009		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3010			   REAL_VALUE_NEGATIVE (fsfv)))
3011#endif
3012		   )
3013		  && COMPARISON_P (p->exp)))
3014	    {
3015	      x = p->exp;
3016	      break;
3017	    }
3018	  else if ((code == EQ
3019		    || (code == GE
3020			&& val_signbit_known_set_p (inner_mode,
3021						    STORE_FLAG_VALUE))
3022#ifdef FLOAT_STORE_FLAG_VALUE
3023		    || (code == GE
3024			&& SCALAR_FLOAT_MODE_P (inner_mode)
3025			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3026			    REAL_VALUE_NEGATIVE (fsfv)))
3027#endif
3028		    )
3029		   && COMPARISON_P (p->exp))
3030	    {
3031	      reverse_code = 1;
3032	      x = p->exp;
3033	      break;
3034	    }
3035
3036	  /* If this is a non-trapping address, e.g. fp + constant, the
3037	     equivalent is a better operand since it may let us predict
3038	     the value of the comparison.  */
3039	  else if (!rtx_addr_can_trap_p (p->exp))
3040	    {
3041	      arg1 = p->exp;
3042	      continue;
3043	    }
3044	}
3045
3046      /* If we didn't find a useful equivalence for ARG1, we are done.
3047	 Otherwise, set up for the next iteration.  */
3048      if (x == 0)
3049	break;
3050
3051      /* If we need to reverse the comparison, make sure that is
3052	 possible -- we can't necessarily infer the value of GE from LT
3053	 with floating-point operands.  */
3054      if (reverse_code)
3055	{
3056	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3057	  if (reversed == UNKNOWN)
3058	    break;
3059	  else
3060	    code = reversed;
3061	}
3062      else if (COMPARISON_P (x))
3063	code = GET_CODE (x);
3064      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3065    }
3066
3067  /* Return our results.  Return the modes from before fold_rtx
3068     because fold_rtx might produce const_int, and then it's too late.  */
3069  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3070  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3071
3072  if (visited)
3073    pointer_set_destroy (visited);
3074  return code;
3075}
3076
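/* Sketch of a typical walk (hypothetical RTL): called with CODE EQ,
   *PARG1 = (reg:CC 24) and *PARG2 = (const_int 0), where the table
   records (reg:CC 24) as equivalent to
   (compare (reg:SI 100) (const_int 7)), the loop rewrites the
   arguments to (reg:SI 100) and (const_int 7) and keeps CODE; had the
   equivalent been a comparison operator producing STORE_FLAG_VALUE,
   CODE would be reversed for EQ as described above.  */
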
3077/* If X is a nontrivial arithmetic operation on an argument for which
3078   a constant value can be determined, return the result of operating
3079   on that value, as a constant.  Otherwise, return X, possibly with
3080   one or more operands changed to a forward-propagated constant.
3081
3082   If X is a register whose contents are known, we do NOT return
3083   those contents here; equiv_constant is called to perform that task.
3084   For SUBREGs and MEMs, we do that both here and in equiv_constant.
3085
3086   INSN is the insn that we may be modifying.  If it is 0, make a copy
3087   of X before modifying it.  */
3088
3089static rtx
3090fold_rtx (rtx x, rtx insn)
3091{
3092  enum rtx_code code;
3093  enum machine_mode mode;
3094  const char *fmt;
3095  int i;
3096  rtx new_rtx = 0;
3097  int changed = 0;
3098
3099  /* Operands of X.  */
3100  rtx folded_arg0;
3101  rtx folded_arg1;
3102
3103  /* Constant equivalents of first three operands of X;
3104     0 when no such equivalent is known.  */
3105  rtx const_arg0;
3106  rtx const_arg1;
3107  rtx const_arg2;
3108
3109  /* The mode of the first operand of X.  We need this for sign and zero
3110     extends.  */
3111  enum machine_mode mode_arg0;
3112
3113  if (x == 0)
3114    return x;
3115
3116  /* Try to perform some initial simplifications on X.  */
3117  code = GET_CODE (x);
3118  switch (code)
3119    {
3120    case MEM:
3121    case SUBREG:
3122      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3123        return new_rtx;
3124      return x;
3125
3126    case CONST:
3127    CASE_CONST_ANY:
3128    case SYMBOL_REF:
3129    case LABEL_REF:
3130    case REG:
3131    case PC:
3132      /* No use simplifying an EXPR_LIST
3133	 since they are used only for lists of args
3134	 in a function call's REG_EQUAL note.  */
3135    case EXPR_LIST:
3136      return x;
3137
3138#ifdef HAVE_cc0
3139    case CC0:
3140      return prev_insn_cc0;
3141#endif
3142
3143    case ASM_OPERANDS:
3144      if (insn)
3145	{
3146	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3147	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3148			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3149	}
3150      return x;
3151
3152#ifdef NO_FUNCTION_CSE
3153    case CALL:
3154      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3155	return x;
3156      break;
3157#endif
3158
3159    /* Anything else goes through the loop below.  */
3160    default:
3161      break;
3162    }
3163
3164  mode = GET_MODE (x);
3165  const_arg0 = 0;
3166  const_arg1 = 0;
3167  const_arg2 = 0;
3168  mode_arg0 = VOIDmode;
3169
3170  /* Try folding our operands.
3171     Then see which ones have constant values known.  */
3172
3173  fmt = GET_RTX_FORMAT (code);
3174  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3175    if (fmt[i] == 'e')
3176      {
3177	rtx folded_arg = XEXP (x, i), const_arg;
3178	enum machine_mode mode_arg = GET_MODE (folded_arg);
3179
3180	switch (GET_CODE (folded_arg))
3181	  {
3182	  case MEM:
3183	  case REG:
3184	  case SUBREG:
3185	    const_arg = equiv_constant (folded_arg);
3186	    break;
3187
3188	  case CONST:
3189	  CASE_CONST_ANY:
3190	  case SYMBOL_REF:
3191	  case LABEL_REF:
3192	    const_arg = folded_arg;
3193	    break;
3194
3195#ifdef HAVE_cc0
3196	  case CC0:
3197	    folded_arg = prev_insn_cc0;
3198	    mode_arg = prev_insn_cc0_mode;
3199	    const_arg = equiv_constant (folded_arg);
3200	    break;
3201#endif
3202
3203	  default:
3204	    folded_arg = fold_rtx (folded_arg, insn);
3205	    const_arg = equiv_constant (folded_arg);
3206	    break;
3207	  }
3208
3209	/* For the first three operands, see if the operand
3210	   is constant or equivalent to a constant.  */
3211	switch (i)
3212	  {
3213	  case 0:
3214	    folded_arg0 = folded_arg;
3215	    const_arg0 = const_arg;
3216	    mode_arg0 = mode_arg;
3217	    break;
3218	  case 1:
3219	    folded_arg1 = folded_arg;
3220	    const_arg1 = const_arg;
3221	    break;
3222	  case 2:
3223	    const_arg2 = const_arg;
3224	    break;
3225	  }
3226
3227	/* Pick the least expensive of the argument and an equivalent constant
3228	   argument.  */
3229	if (const_arg != 0
3230	    && const_arg != folded_arg
3231	    && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i)
3232
3233	    /* It's not safe to substitute the operand of a conversion
3234	       operator with a constant, as the conversion's identity
3235	       depends upon the mode of its operand.  This optimization
3236	       is handled by the call to simplify_unary_operation.  */
3237	    && (GET_RTX_CLASS (code) != RTX_UNARY
3238		|| GET_MODE (const_arg) == mode_arg0
3239		|| (code != ZERO_EXTEND
3240		    && code != SIGN_EXTEND
3241		    && code != TRUNCATE
3242		    && code != FLOAT_TRUNCATE
3243		    && code != FLOAT_EXTEND
3244		    && code != FLOAT
3245		    && code != FIX
3246		    && code != UNSIGNED_FLOAT
3247		    && code != UNSIGNED_FIX)))
3248	  folded_arg = const_arg;
3249
3250	if (folded_arg == XEXP (x, i))
3251	  continue;
3252
3253	if (insn == NULL_RTX && !changed)
3254	  x = copy_rtx (x);
3255	changed = 1;
3256	validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3257      }
3258
3259  if (changed)
3260    {
3261      /* Canonicalize X if necessary, and keep const_argN and folded_argN
3262	 consistent with the order in X.  */
3263      if (canonicalize_change_group (insn, x))
3264	{
3265	  rtx tem;
3266	  tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3267	  tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3268	}
3269
3270      apply_change_group ();
3271    }
3272
3273  /* If X is an arithmetic operation, see if we can simplify it.  */
3274
3275  switch (GET_RTX_CLASS (code))
3276    {
3277    case RTX_UNARY:
3278      {
3279	/* We can't simplify extension ops unless we know the
3280	   original mode.  */
3281	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3282	    && mode_arg0 == VOIDmode)
3283	  break;
3284
3285	new_rtx = simplify_unary_operation (code, mode,
3286					const_arg0 ? const_arg0 : folded_arg0,
3287					mode_arg0);
3288      }
3289      break;
3290
3291    case RTX_COMPARE:
3292    case RTX_COMM_COMPARE:
3293      /* See what items are actually being compared and set FOLDED_ARG[01]
3294	 to those values and CODE to the actual comparison code.  If any are
3295	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3296	 do anything if both operands are already known to be constant.  */
3297
3298      /* ??? Vector mode comparisons are not supported yet.  */
3299      if (VECTOR_MODE_P (mode))
3300	break;
3301
3302      if (const_arg0 == 0 || const_arg1 == 0)
3303	{
3304	  struct table_elt *p0, *p1;
3305	  rtx true_rtx, false_rtx;
3306	  enum machine_mode mode_arg1;
3307
3308	  if (SCALAR_FLOAT_MODE_P (mode))
3309	    {
3310#ifdef FLOAT_STORE_FLAG_VALUE
3311	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3312			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3313#else
3314	      true_rtx = NULL_RTX;
3315#endif
3316	      false_rtx = CONST0_RTX (mode);
3317	    }
3318	  else
3319	    {
3320	      true_rtx = const_true_rtx;
3321	      false_rtx = const0_rtx;
3322	    }
3323
3324	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3325				       &mode_arg0, &mode_arg1);
3326
3327	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3328	     what kinds of things are being compared, so we can't do
3329	     anything with this comparison.  */
3330
3331	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3332	    break;
3333
3334	  const_arg0 = equiv_constant (folded_arg0);
3335	  const_arg1 = equiv_constant (folded_arg1);
3336
3337	  /* If we do not now have two constants being compared, see
3338	     if we can nevertheless deduce some things about the
3339	     comparison.  */
3340	  if (const_arg0 == 0 || const_arg1 == 0)
3341	    {
3342	      if (const_arg1 != NULL)
3343		{
3344		  rtx cheapest_simplification;
3345		  int cheapest_cost;
3346		  rtx simp_result;
3347		  struct table_elt *p;
3348
3349		  /* See if we can find an equivalent of folded_arg0
3350		     that gets us a cheaper expression, possibly a
3351		     constant through simplifications.  */
3352		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3353			      mode_arg0);
3354
3355		  if (p != NULL)
3356		    {
3357		      cheapest_simplification = x;
3358		      cheapest_cost = COST (x);
3359
3360		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3361			{
3362			  int cost;
3363
3364			  /* If the entry isn't valid, skip it.  */
3365			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3366			    continue;
3367
3368			  /* Try to simplify using this equivalence.  */
3369			  simp_result
3370			    = simplify_relational_operation (code, mode,
3371							     mode_arg0,
3372							     p->exp,
3373							     const_arg1);
3374
3375			  if (simp_result == NULL)
3376			    continue;
3377
3378			  cost = COST (simp_result);
3379			  if (cost < cheapest_cost)
3380			    {
3381			      cheapest_cost = cost;
3382			      cheapest_simplification = simp_result;
3383			    }
3384			}
3385
3386		      /* If we have a cheaper expression now, use that
3387			 and try folding it further, from the top.  */
3388		      if (cheapest_simplification != x)
3389			return fold_rtx (copy_rtx (cheapest_simplification),
3390					 insn);
3391		    }
3392		}
3393
3394	      /* See if the two operands are the same.  */
3395
3396	      if ((REG_P (folded_arg0)
3397		   && REG_P (folded_arg1)
3398		   && (REG_QTY (REGNO (folded_arg0))
3399		       == REG_QTY (REGNO (folded_arg1))))
3400		  || ((p0 = lookup (folded_arg0,
3401				    SAFE_HASH (folded_arg0, mode_arg0),
3402				    mode_arg0))
3403		      && (p1 = lookup (folded_arg1,
3404				       SAFE_HASH (folded_arg1, mode_arg0),
3405				       mode_arg0))
3406		      && p0->first_same_value == p1->first_same_value))
3407		folded_arg1 = folded_arg0;
3408
3409	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3410		 doing now is either the same as we did before or the reverse
3411		 (we only check the reverse if not floating-point).  */
3412	      else if (REG_P (folded_arg0))
3413		{
3414		  int qty = REG_QTY (REGNO (folded_arg0));
3415
3416		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3417		    {
3418		      struct qty_table_elem *ent = &qty_table[qty];
3419
3420		      if ((comparison_dominates_p (ent->comparison_code, code)
3421			   || (! FLOAT_MODE_P (mode_arg0)
3422			       && comparison_dominates_p (ent->comparison_code,
3423						          reverse_condition (code))))
3424			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3425			      || (const_arg1
3426				  && rtx_equal_p (ent->comparison_const,
3427						  const_arg1))
3428			      || (REG_P (folded_arg1)
3429				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3430			{
3431			  if (comparison_dominates_p (ent->comparison_code, code))
3432			    {
3433			      if (true_rtx)
3434				return true_rtx;
3435			      else
3436				break;
3437			    }
3438			  else
3439			    return false_rtx;
3440			}
3441		    }
3442		}
3443	    }
3444	}
3445
3446      /* If we are comparing against zero, see if the first operand is
3447	 equivalent to an IOR with a constant.  If so, we may be able to
3448	 determine the result of this comparison.  */
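      /* A hypothetical instance: if (reg 100) is known to be equivalent
	 to (ior (reg 101) (const_int 4)), then bit 2 of (reg 100) is
	 always set, so (eq (reg 100) (const_int 0)) must be false.  The
	 IOR is substituted below so the simplifier can see this.  */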
3449      if (const_arg1 == const0_rtx && !const_arg0)
3450	{
3451	  rtx y = lookup_as_function (folded_arg0, IOR);
3452	  rtx inner_const;
3453
3454	  if (y != 0
3455	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3456	      && CONST_INT_P (inner_const)
3457	      && INTVAL (inner_const) != 0)
3458	    folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3459	}
3460
3461      {
3462	rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3463	rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3464	new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3465						 op0, op1);
3466      }
3467      break;
3468
3469    case RTX_BIN_ARITH:
3470    case RTX_COMM_ARITH:
3471      switch (code)
3472	{
3473	case PLUS:
3474	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
3475	     with that LABEL_REF as its second operand.  If so, the result is
3476	     the first operand of that MINUS.  This handles switches with an
3477	     ADDR_DIFF_VEC table.  */
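	  /* A sketch of the pattern handled here (labels hypothetical):
	     folding
	       (plus (minus (label_ref L2) (label_ref L1)) (label_ref L1))
	     yields (label_ref L2), recovering the branch target encoded
	     in an ADDR_DIFF_VEC entry.  */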
3478	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3479	    {
3480	      rtx y
3481		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
3482		: lookup_as_function (folded_arg0, MINUS);
3483
3484	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3485		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3486		return XEXP (y, 0);
3487
3488	      /* Now try for a CONST of a MINUS like the above.  */
3489	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3490			: lookup_as_function (folded_arg0, CONST))) != 0
3491		  && GET_CODE (XEXP (y, 0)) == MINUS
3492		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3493		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3494		return XEXP (XEXP (y, 0), 0);
3495	    }
3496
3497	  /* Likewise if the operands are in the other order.  */
3498	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3499	    {
3500	      rtx y
3501		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
3502		: lookup_as_function (folded_arg1, MINUS);
3503
3504	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3505		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3506		return XEXP (y, 0);
3507
3508	      /* Now try for a CONST of a MINUS like the above.  */
3509	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3510			: lookup_as_function (folded_arg1, CONST))) != 0
3511		  && GET_CODE (XEXP (y, 0)) == MINUS
3512		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3513		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3514		return XEXP (XEXP (y, 0), 0);
3515	    }
3516
3517	  /* If second operand is a register equivalent to a negative
3518	     CONST_INT, see if we can find a register equivalent to the
3519	     positive constant.  Make a MINUS if so.  Don't do this for
3520	     a non-negative constant since we might then alternate between
3521	     choosing positive and negative constants.  Having the positive
3522	     constant previously-used is the more common case.  Be sure
3523	     the resulting constant is non-negative; if const_arg1 were
3524	     the smallest negative number this would overflow: depending
3525	     on the mode, this would either just be the same value (and
3526	     hence not save anything) or be incorrect.  */
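	  /* Illustrative example (register numbers hypothetical): if
	     (reg 100) is equivalent to (const_int -4) and some (reg 101)
	     is known to hold (const_int 4), then
	       (plus (reg 99) (reg 100))
	     can be rewritten as
	       (minus (reg 99) (reg 101)).  */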
3527	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3528	      && INTVAL (const_arg1) < 0
3529	      /* This used to test
3530
3531	         -INTVAL (const_arg1) >= 0
3532
3533		 But the Sun V5.0 compilers mis-compiled that test.  So
3534		 instead we test for the problematic value in a more direct
3535		 manner and hope the Sun compilers get it correct.  */
3536	      && INTVAL (const_arg1) !=
3537	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3538	      && REG_P (folded_arg1))
3539	    {
3540	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
3541	      struct table_elt *p
3542		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
3543
3544	      if (p)
3545		for (p = p->first_same_value; p; p = p->next_same_value)
3546		  if (REG_P (p->exp))
3547		    return simplify_gen_binary (MINUS, mode, folded_arg0,
3548						canon_reg (p->exp, NULL_RTX));
3549	    }
3550	  goto from_plus;
3551
3552	case MINUS:
3553	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3554	     If so, produce (PLUS Z C2-C).  */
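	  /* E.g. (registers hypothetical): if Y is known to be
	     (plus (reg 101) (const_int 8)), then
	     (minus Y (const_int 3)) folds to
	     (plus (reg 101) (const_int 5)).  */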
3555	  if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3556	    {
3557	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3558	      if (y && CONST_INT_P (XEXP (y, 1)))
3559		return fold_rtx (plus_constant (mode, copy_rtx (y),
3560						-INTVAL (const_arg1)),
3561				 NULL_RTX);
3562	    }
3563
3564	  /* Fall through.  */
3565
3566	from_plus:
3567	case SMIN:    case SMAX:      case UMIN:    case UMAX:
3568	case IOR:     case AND:       case XOR:
3569	case MULT:
3570	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
3571	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3572	     is known to be of similar form, we may be able to replace the
3573	     operation with a combined operation.  This may eliminate the
3574	     intermediate operation if every use is simplified in this way.
3575	     Note that the similar optimization done by combine.c only works
3576	     if the intermediate operation's result has only one reference.  */
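	  /* A sketch (register numbers hypothetical): given
	       (set (reg 100) (ashift (reg 99) (const_int 2)))
	       (set (reg 101) (ashift (reg 100) (const_int 3)))
	     the second shift folds to (ashift (reg 99) (const_int 5)),
	     and the intermediate shift becomes dead if this was its
	     only use.  */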
3577
3578	  if (REG_P (folded_arg0)
3579	      && const_arg1 && CONST_INT_P (const_arg1))
3580	    {
3581	      int is_shift
3582		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3583	      rtx y, inner_const, new_const;
3584	      rtx canon_const_arg1 = const_arg1;
3585	      enum rtx_code associate_code;
3586
3587	      if (is_shift
3588		  && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
3589		      || INTVAL (const_arg1) < 0))
3590		{
3591		  if (SHIFT_COUNT_TRUNCATED)
3592		    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3593						& (GET_MODE_BITSIZE (mode)
3594						   - 1));
3595		  else
3596		    break;
3597		}
3598
3599	      y = lookup_as_function (folded_arg0, code);
3600	      if (y == 0)
3601		break;
3602
3603	      /* If we have compiled a statement like
3604		 "if (x == (x & mask1))", and now are looking at
3605		 "x & mask2", we will have a case where the first operand
3606		 of Y is the same as our first operand.  Unless we detect
3607		 this case, an infinite loop will result.  */
3608	      if (XEXP (y, 0) == folded_arg0)
3609		break;
3610
3611	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3612	      if (!inner_const || !CONST_INT_P (inner_const))
3613		break;
3614
3615	      /* Don't associate these operations if they are a PLUS with the
3616		 same constant and it is a power of two.  These might be doable
3617		 with a pre- or post-increment.  Similarly for two subtracts of
3618		 identical powers of two with post-decrement.  */
3619
3620	      if (code == PLUS && const_arg1 == inner_const
3621		  && ((HAVE_PRE_INCREMENT
3622			  && exact_log2 (INTVAL (const_arg1)) >= 0)
3623		      || (HAVE_POST_INCREMENT
3624			  && exact_log2 (INTVAL (const_arg1)) >= 0)
3625		      || (HAVE_PRE_DECREMENT
3626			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
3627		      || (HAVE_POST_DECREMENT
3628			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3629		break;
3630
3631	      /* ??? Vector mode shifts by scalar
3632		 shift operand are not supported yet.  */
3633	      if (is_shift && VECTOR_MODE_P (mode))
3634		break;
3635
3636	      if (is_shift
3637		  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
3638		      || INTVAL (inner_const) < 0))
3639		{
3640		  if (SHIFT_COUNT_TRUNCATED)
3641		    inner_const = GEN_INT (INTVAL (inner_const)
3642					   & (GET_MODE_BITSIZE (mode) - 1));
3643		  else
3644		    break;
3645		}
3646
3647	      /* Compute the code used to compose the constants.  For example,
3648		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
3649
3650	      associate_code = (is_shift || code == MINUS ? PLUS : code);
3651
3652	      new_const = simplify_binary_operation (associate_code, mode,
3653						     canon_const_arg1,
3654						     inner_const);
3655
3656	      if (new_const == 0)
3657		break;
3658
3659	      /* If we are associating shift operations, don't let this
3660		 produce a shift of the size of the object or larger.
3661		 This could occur when we follow a sign-extend by a right
3662		 shift on a machine that does a sign-extend as a pair
3663		 of shifts.  */
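	      /* E.g. with 32-bit SImode (widths illustrative), combining
		 (ashiftrt (ashiftrt X (const_int 24)) (const_int 16))
		 would yield a shift count of 40.  For ASHIFTRT we clamp
		 the count to 31; for the other shift codes the result is
		 zero or the fold is abandoned.  */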
3664
3665	      if (is_shift
3666		  && CONST_INT_P (new_const)
3667		  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
3668		{
3669		  /* As an exception, we can turn an ASHIFTRT of this
3670		     form into a shift of the number of bits - 1.  */
3671		  if (code == ASHIFTRT)
3672		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3673		  else if (!side_effects_p (XEXP (y, 0)))
3674		    return CONST0_RTX (mode);
3675		  else
3676		    break;
3677		}
3678
3679	      y = copy_rtx (XEXP (y, 0));
3680
3681	      /* If Y contains our first operand (the most common way this
3682		 can happen is if Y is a MEM), we would go into an infinite
3683		 loop if we tried to fold it.  So don't in that case.  */
3684
3685	      if (! reg_mentioned_p (folded_arg0, y))
3686		y = fold_rtx (y, insn);
3687
3688	      return simplify_gen_binary (code, mode, y, new_const);
3689	    }
3690	  break;
3691
3692	case DIV:       case UDIV:
3693	  /* ??? The associative optimization performed immediately above is
3694	     also possible for DIV and UDIV using associate_code of MULT.
3695	     However, we would need extra code to verify that the
3696	     multiplication does not overflow, that is, there is no overflow
3697	     in the calculation of new_const.  */
3698	  break;
3699
3700	default:
3701	  break;
3702	}
3703
3704      new_rtx = simplify_binary_operation (code, mode,
3705				       const_arg0 ? const_arg0 : folded_arg0,
3706				       const_arg1 ? const_arg1 : folded_arg1);
3707      break;
3708
3709    case RTX_OBJ:
3710      /* (lo_sum (high X) X) is simply X.  */
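      /* This pattern arises from split address loads, e.g. (names
	 hypothetical):
	   (set (reg 100) (high (symbol_ref "x")))
	   (set (reg 101) (lo_sum (reg 100) (symbol_ref "x")))
	 where the LO_SUM computes the complete address of "x".  */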
3711      if (code == LO_SUM && const_arg0 != 0
3712	  && GET_CODE (const_arg0) == HIGH
3713	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3714	return const_arg1;
3715      break;
3716
3717    case RTX_TERNARY:
3718    case RTX_BITFIELD_OPS:
3719      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3720					const_arg0 ? const_arg0 : folded_arg0,
3721					const_arg1 ? const_arg1 : folded_arg1,
3722					const_arg2 ? const_arg2 : XEXP (x, 2));
3723      break;
3724
3725    default:
3726      break;
3727    }
3728
3729  return new_rtx ? new_rtx : x;
3730}
3731
3732/* Return a constant value currently equivalent to X.
3733   Return 0 if we don't know one.  */
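/* For example (register number hypothetical): once
     (set (reg:SI 100) (const_int 42))
   has been processed, equiv_constant applied to (reg:SI 100) returns
   (const_int 42), for as long as that equivalence is still recorded
   in qty_table.  */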
3734
3735static rtx
3736equiv_constant (rtx x)
3737{
3738  if (REG_P (x)
3739      && REGNO_QTY_VALID_P (REGNO (x)))
3740    {
3741      int x_q = REG_QTY (REGNO (x));
3742      struct qty_table_elem *x_ent = &qty_table[x_q];
3743
3744      if (x_ent->const_rtx)
3745	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3746    }
3747
3748  if (x == 0 || CONSTANT_P (x))
3749    return x;
3750
3751  if (GET_CODE (x) == SUBREG)
3752    {
3753      enum machine_mode mode = GET_MODE (x);
3754      enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3755      rtx new_rtx;
3756
3757      /* See if we previously assigned a constant value to this SUBREG.  */
3758      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3759          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3760          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3761        return new_rtx;
3762
3763      /* If we didn't and if doing so makes sense, see if we previously
3764	 assigned a constant value to the enclosing word mode SUBREG.  */
3765      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3766	  && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3767	{
3768	  int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3769	  if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3770	    {
3771	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3772	      new_rtx = lookup_as_function (y, CONST_INT);
3773	      if (new_rtx)
3774		return gen_lowpart (mode, new_rtx);
3775	    }
3776	}
3777
3778      /* Otherwise see if we already have a constant for the inner REG,
3779	 and if that is enough to calculate an equivalent constant for
3780	 the subreg.  Note that the upper bits of paradoxical subregs
3781	 are undefined, so they cannot be said to equal anything.  */
3782      if (REG_P (SUBREG_REG (x))
3783	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (imode)
3784	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3785        return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3786
3787      return 0;
3788    }
3789
3790  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3791     the hash table in case its value was seen before.  */
3792
3793  if (MEM_P (x))
3794    {
3795      struct table_elt *elt;
3796
3797      x = avoid_constant_pool_reference (x);
3798      if (CONSTANT_P (x))
3799	return x;
3800
3801      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3802      if (elt == 0)
3803	return 0;
3804
3805      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3806	if (elt->is_const && CONSTANT_P (elt->exp))
3807	  return elt->exp;
3808    }
3809
3810  return 0;
3811}
3812
3813/* Given INSN, a jump insn, TAKEN indicates if we are following the
3814   "taken" branch.
3815
3816   In certain cases, this can cause us to add an equivalence.  For example,
3817   if we are following the taken case of
3818	if (i == 2)
3819   we can add the fact that `i' and `2' are now equivalent.
3820
3821   In any case, we can record that this comparison was passed.  If the same
3822   comparison is seen later, we will know its value.  */
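/* A sketch (register and label numbers hypothetical): after taking
   the branch of
     (set (pc) (if_then_else (eq (reg 100) (const_int 2))
			     (label_ref 23) (pc)))
   we may treat (reg 100) and (const_int 2) as equivalent for the
   remainder of the extended basic block.  */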
3823
3824static void
3825record_jump_equiv (rtx insn, bool taken)
3826{
3827  int cond_known_true;
3828  rtx op0, op1;
3829  rtx set;
3830  enum machine_mode mode, mode0, mode1;
3831  int reversed_nonequality = 0;
3832  enum rtx_code code;
3833
3834  /* Ensure this is the right kind of insn.  */
3835  gcc_assert (any_condjump_p (insn));
3836
3837  set = pc_set (insn);
3838
3839  /* See if this jump condition is known true or false.  */
3840  if (taken)
3841    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3842  else
3843    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3844
3845  /* Get the type of comparison being done and the operands being compared.
3846     If we had to reverse a non-equality condition, record that fact so we
3847     know that it isn't valid for floating-point.  */
3848  code = GET_CODE (XEXP (SET_SRC (set), 0));
3849  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3850  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3851
3852  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3853  if (! cond_known_true)
3854    {
3855      code = reversed_comparison_code_parts (code, op0, op1, insn);
3856
3857      /* Don't remember if we can't find the inverse.  */
3858      if (code == UNKNOWN)
3859	return;
3860    }
3861
3862  /* The mode is the mode of the non-constant.  */
3863  mode = mode0;
3864  if (mode1 != VOIDmode)
3865    mode = mode1;
3866
3867  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3868}
3869
3870/* Yet another form of subreg creation.  In this case, we want something in
3871   MODE, and we should assume OP has MODE iff it is naturally modeless.  */
3872
3873static rtx
3874record_jump_cond_subreg (enum machine_mode mode, rtx op)
3875{
3876  enum machine_mode op_mode = GET_MODE (op);
3877  if (op_mode == mode || op_mode == VOIDmode)
3878    return op;
3879  return lowpart_subreg (mode, op, op_mode);
3880}
3881
3882/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3883   REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
3884   Make any useful entries we can with that information.  Called from
3885   above function and called recursively.  */
3886
3887static void
3888record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
3889		  rtx op1, int reversed_nonequality)
3890{
3891  unsigned op0_hash, op1_hash;
3892  int op0_in_memory, op1_in_memory;
3893  struct table_elt *op0_elt, *op1_elt;
3894
3895  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3896     we know that they are also equal in the smaller mode (this is also
3897     true for all smaller modes whether or not there is a SUBREG, but
3898     is not worth testing for with no SUBREG).  */
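  /* A sketch (modes and registers hypothetical): if
     (subreg:SI (reg:QI 100) 0) is known equal to (reg:SI 101), we also
     record that (reg:QI 100) equals the QImode lowpart of
     (reg:SI 101).  */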
3899
3900  /* Note that GET_MODE (op0) may not equal MODE.  */
3901  if (code == EQ && paradoxical_subreg_p (op0))
3902    {
3903      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3904      rtx tem = record_jump_cond_subreg (inner_mode, op1);
3905      if (tem)
3906	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3907			  reversed_nonequality);
3908    }
3909
3910  if (code == EQ && paradoxical_subreg_p (op1))
3911    {
3912      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3913      rtx tem = record_jump_cond_subreg (inner_mode, op0);
3914      if (tem)
3915	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3916			  reversed_nonequality);
3917    }
3918
3919  /* Similarly, if this is an NE comparison, and either is a SUBREG
3920     making a smaller mode, we know the whole thing is also NE.  */
3921
3922  /* Note that GET_MODE (op0) may not equal MODE;
3923     if we test MODE instead, we can get an infinite recursion
3924     alternating between two modes each wider than MODE.  */
3925
3926  if (code == NE && GET_CODE (op0) == SUBREG
3927      && subreg_lowpart_p (op0)
3928      && (GET_MODE_SIZE (GET_MODE (op0))
3929	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3930    {
3931      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3932      rtx tem = record_jump_cond_subreg (inner_mode, op1);
3933      if (tem)
3934	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3935			  reversed_nonequality);
3936    }
3937
3938  if (code == NE && GET_CODE (op1) == SUBREG
3939      && subreg_lowpart_p (op1)
3940      && (GET_MODE_SIZE (GET_MODE (op1))
3941	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3942    {
3943      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3944      rtx tem = record_jump_cond_subreg (inner_mode, op0);
3945      if (tem)
3946	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3947			  reversed_nonequality);
3948    }
3949
3950  /* Hash both operands.  */
3951
3952  do_not_record = 0;
3953  hash_arg_in_memory = 0;
3954  op0_hash = HASH (op0, mode);
3955  op0_in_memory = hash_arg_in_memory;
3956
3957  if (do_not_record)
3958    return;
3959
3960  do_not_record = 0;
3961  hash_arg_in_memory = 0;
3962  op1_hash = HASH (op1, mode);
3963  op1_in_memory = hash_arg_in_memory;
3964
3965  if (do_not_record)
3966    return;
3967
3968  /* Look up both operands.  */
3969  op0_elt = lookup (op0, op0_hash, mode);
3970  op1_elt = lookup (op1, op1_hash, mode);
3971
3972  /* If both operands are already equivalent or if they are not in the
3973     table but are identical, do nothing.  */
3974  if ((op0_elt != 0 && op1_elt != 0
3975       && op0_elt->first_same_value == op1_elt->first_same_value)
3976      || op0 == op1 || rtx_equal_p (op0, op1))
3977    return;
3978
3979  /* If we aren't setting two things equal, all we can do is save this
3980     comparison.  Similarly if this is floating-point.  In the latter
3981     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
3982     If we record the equality, we might inadvertently delete code
3983     whose intent was to change -0 to +0.  */
3984
3985  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
3986    {
3987      struct qty_table_elem *ent;
3988      int qty;
3989
3990      /* If we reversed a floating-point comparison, if OP0 is not a
3991	 register, or if OP1 is neither a register nor a constant, we can't
3992	 do anything.  */
3993
3994      if (!REG_P (op1))
3995	op1 = equiv_constant (op1);
3996
3997      if ((reversed_nonequality && FLOAT_MODE_P (mode))
3998	  || !REG_P (op0) || op1 == 0)
3999	return;
4000
4001      /* Put OP0 in the hash table if it isn't already.  This gives it a
4002	 new quantity number.  */
4003      if (op0_elt == 0)
4004	{
4005	  if (insert_regs (op0, NULL, 0))
4006	    {
4007	      rehash_using_reg (op0);
4008	      op0_hash = HASH (op0, mode);
4009
4010	      /* If OP0 is contained in OP1, this changes its hash code
4011		 as well.  Faster to rehash than to check, except
4012		 for the simple case of a constant.  */
4013	      if (! CONSTANT_P (op1))
4014		op1_hash = HASH (op1, mode);
4015	    }
4016
4017	  op0_elt = insert (op0, NULL, op0_hash, mode);
4018	  op0_elt->in_memory = op0_in_memory;
4019	}
4020
4021      qty = REG_QTY (REGNO (op0));
4022      ent = &qty_table[qty];
4023
4024      ent->comparison_code = code;
4025      if (REG_P (op1))
4026	{
4027	  /* Look it up again--in case op0 and op1 are the same.  */
4028	  op1_elt = lookup (op1, op1_hash, mode);
4029
4030	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4031	  if (op1_elt == 0)
4032	    {
4033	      if (insert_regs (op1, NULL, 0))
4034		{
4035		  rehash_using_reg (op1);
4036		  op1_hash = HASH (op1, mode);
4037		}
4038
4039	      op1_elt = insert (op1, NULL, op1_hash, mode);
4040	      op1_elt->in_memory = op1_in_memory;
4041	    }
4042
4043	  ent->comparison_const = NULL_RTX;
4044	  ent->comparison_qty = REG_QTY (REGNO (op1));
4045	}
4046      else
4047	{
4048	  ent->comparison_const = op1;
4049	  ent->comparison_qty = -1;
4050	}
4051
4052      return;
4053    }
4054
4055  /* If either side is still missing an equivalence, make it now,
4056     then merge the equivalences.  */
4057
4058  if (op0_elt == 0)
4059    {
4060      if (insert_regs (op0, NULL, 0))
4061	{
4062	  rehash_using_reg (op0);
4063	  op0_hash = HASH (op0, mode);
4064	}
4065
4066      op0_elt = insert (op0, NULL, op0_hash, mode);
4067      op0_elt->in_memory = op0_in_memory;
4068    }
4069
4070  if (op1_elt == 0)
4071    {
4072      if (insert_regs (op1, NULL, 0))
4073	{
4074	  rehash_using_reg (op1);
4075	  op1_hash = HASH (op1, mode);
4076	}
4077
4078      op1_elt = insert (op1, NULL, op1_hash, mode);
4079      op1_elt->in_memory = op1_in_memory;
4080    }
4081
4082  merge_equiv_classes (op0_elt, op1_elt);
4083}
4084
4085/* CSE processing for one instruction.
4086
4087   Most "true" common subexpressions are optimized away in GIMPLE,
4088   but the few that "leak through" are cleaned up by cse_insn, and complex
4089   addressing modes are often formed here.
4090
4091   The main function is cse_insn, and between here and that function
4092   a couple of helper functions are defined to keep the size of cse_insn
4093   within reasonable proportions.
4094
4095   Data is shared between the main and helper functions via struct set,
4096   which contains all the data related to each SET in the instruction
4097   being processed.
4098
4099   Note that cse_insn processes all sets in the instruction.  Most
4100   passes in GCC only process simple SET insns or single_set insns, but
4101   CSE processes insns with multiple sets as well.  */
4102
4103/* Data on one SET contained in the instruction.  */
4104
4105struct set
4106{
4107  /* The SET rtx itself.  */
4108  rtx rtl;
4109  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4110  rtx src;
4111  /* The hash-table element for the SET_SRC of the SET.  */
4112  struct table_elt *src_elt;
4113  /* Hash value for the SET_SRC.  */
4114  unsigned src_hash;
4115  /* Hash value for the SET_DEST.  */
4116  unsigned dest_hash;
4117  /* The SET_DEST, with SUBREG, etc., stripped.  */
4118  rtx inner_dest;
4119  /* Nonzero if the SET_SRC is in memory.  */
4120  char src_in_memory;
4121  /* Nonzero if the SET_SRC contains something
4122     whose value cannot be predicted and understood.  */
4123  char src_volatile;
4124  /* Original machine mode, in case it becomes a CONST_INT.
4125     The size of this field should match the size of the mode
4126     field of struct rtx_def (see rtl.h).  */
4127  ENUM_BITFIELD(machine_mode) mode : 8;
4128  /* A constant equivalent for SET_SRC, if any.  */
4129  rtx src_const;
4130  /* Hash value of constant equivalent for SET_SRC.  */
4131  unsigned src_const_hash;
4132  /* Table entry for constant equivalent for SET_SRC, if any.  */
4133  struct table_elt *src_const_elt;
4134  /* Table entry for the destination address.  */
4135  struct table_elt *dest_addr_elt;
4136};
4137
4138/* Special handling for (set REG0 REG1) where REG0 is the
4139   "cheapest", cheaper than REG1.  After cse, REG1 will probably not
4140   be used in the sequel, so (if easily done) change this insn to
4141   (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4142   that computed their value.  Then REG1 will become a dead store
4143   and won't cloud the situation for later optimizations.
4144
4145   Do not make this change if REG1 is a hard register, because it will
4146   then be used in the sequel and we may be changing a two-operand insn
4147   into a three-operand insn.
4148
4149   This is the last transformation that cse_insn will try to do.  */
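/* An illustrative before/after, with reg 50 as the cheaper register
   (register numbers hypothetical):

     (set (reg 100) (plus (reg 98) (reg 99)))
     (set (reg 50) (reg 100))

   becomes

     (set (reg 50) (plus (reg 98) (reg 99)))
     (set (reg 100) (reg 50))

   so the copy into reg 100 turns into a dead store when reg 100 has
   no later uses.  */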
4150
4151static void
4152try_back_substitute_reg (rtx set, rtx insn)
4153{
4154  rtx dest = SET_DEST (set);
4155  rtx src = SET_SRC (set);
4156
4157  if (REG_P (dest)
4158      && REG_P (src) && ! HARD_REGISTER_P (src)
4159      && REGNO_QTY_VALID_P (REGNO (src)))
4160    {
4161      int src_q = REG_QTY (REGNO (src));
4162      struct qty_table_elem *src_ent = &qty_table[src_q];
4163
4164      if (src_ent->first_reg == REGNO (dest))
4165	{
4166	  /* Scan for the previous nonnote insn, but stop at a basic
4167	     block boundary.  */
4168	  rtx prev = insn;
4169	  rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
4170	  do
4171	    {
4172	      prev = PREV_INSN (prev);
4173	    }
4174	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
4175
4176	  /* Do not swap the registers around if the previous instruction
4177	     attaches a REG_EQUIV note to REG1.
4178
4179	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
4180	     from the pseudo that originally shadowed an incoming argument
4181	     to another register.  Some uses of REG_EQUIV might rely on it
4182	     being attached to REG1 rather than REG2.
4183
4184	     This section previously turned the REG_EQUIV into a REG_EQUAL
4185	     note.  We cannot do that because REG_EQUIV may provide an
4186	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
4187	  if (NONJUMP_INSN_P (prev)
4188	      && GET_CODE (PATTERN (prev)) == SET
4189	      && SET_DEST (PATTERN (prev)) == src
4190	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
4191	    {
4192	      rtx note;
4193
4194	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
4195	      validate_change (insn, &SET_DEST (set), src, 1);
4196	      validate_change (insn, &SET_SRC (set), dest, 1);
4197	      apply_change_group ();
4198
4199	      /* If INSN has a REG_EQUAL note, and this note mentions
4200		 REG0, then we must delete it, because the value in
4201		 REG0 has changed.  If the note's value is REG1, we must
4202		 also delete it because that is now this insn's dest.  */
4203	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4204	      if (note != 0
4205		  && (reg_mentioned_p (dest, XEXP (note, 0))
4206		      || rtx_equal_p (src, XEXP (note, 0))))
4207		remove_note (insn, note);
4208	    }
4209	}
4210    }
4211}
4212
4213/* Record all the SETs in this instruction into SETS_PTR,
4214   and return the number of recorded sets.  */
4215static int
4216find_sets_in_insn (rtx insn, struct set **psets)
4217{
4218  struct set *sets = *psets;
4219  int n_sets = 0;
4220  rtx x = PATTERN (insn);
4221
4222  if (GET_CODE (x) == SET)
4223    {
4224      /* Ignore SETs that are unconditional jumps.
4225	 They never need cse processing, so this does not hurt.
4226	 The reason is not efficiency but rather
4227	 so that we can test at the end for instructions
4228	 that have been simplified to unconditional jumps
4229	 and not be misled by unchanged instructions
4230	 that were unconditional jumps to begin with.  */
4231      if (SET_DEST (x) == pc_rtx
4232	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4233	;
4234      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4235	 The hard function value register is used only once, to copy to
4236	 someplace else, so it isn't worth cse'ing.  */
4237      else if (GET_CODE (SET_SRC (x)) == CALL)
4238	;
4239      else
4240	sets[n_sets++].rtl = x;
4241    }
4242  else if (GET_CODE (x) == PARALLEL)
4243    {
4244      int i, lim = XVECLEN (x, 0);
4245
4246      /* Go over the expressions of the PARALLEL in forward order, to
4247	 put them in the same order in the SETS array.  */
4248      for (i = 0; i < lim; i++)
4249	{
4250	  rtx y = XVECEXP (x, 0, i);
4251	  if (GET_CODE (y) == SET)
4252	    {
4253	      /* As above, we ignore unconditional jumps and
4254		 call-insns.  */
4255	      if (SET_DEST (y) == pc_rtx
4256		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
4257		;
4258	      else if (GET_CODE (SET_SRC (y)) == CALL)
4259		;
4260	      else
4261		sets[n_sets++].rtl = y;
4262	    }
4263	}
4264    }
4265
4266  return n_sets;
4267}
4268
4269/* Where possible, substitute every register reference in the N_SETS
4270   SETS of INSN with the canonical register.
4271
4272   Register canonicalization propagates the earliest register (i.e.
4273   one that is set before INSN) with the same value.  This is a very
4274   useful, simple form of CSE, to clean up warts from expanding GIMPLE
4275   to RTL.  For instance, a CONST for an address is usually expanded
4276   multiple times to loads into different registers, thus creating many
4277   subexpressions of the form:
4278
4279   (set (reg1) (some_const))
4280   (set (mem (... reg1 ...)) (thing))
4281   (set (reg2) (some_const))
4282   (set (mem (... reg2 ...)) (thing))
4283
4284   After canonicalizing, the code takes the following form:
4285
4286   (set (reg1) (some_const))
4287   (set (mem (... reg1 ...)) (thing))
4288   (set (reg2) (some_const))
4289   (set (mem (... reg1 ...)) (thing))
4290
4291   The set to reg2 is now trivially dead, and the memory reference (or
4292   address, or whatever) may be a candidate for further CSEing.
4293
4294   In this function, the result of apply_change_group can be ignored;
4295   see canon_reg.  */
4296
4297static void
4298canonicalize_insn (rtx insn, struct set **psets, int n_sets)
4299{
4300  struct set *sets = *psets;
4301  rtx tem;
4302  rtx x = PATTERN (insn);
4303  int i;
4304
4305  if (CALL_P (insn))
4306    {
4307      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4308	if (GET_CODE (XEXP (tem, 0)) != SET)
4309	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4310    }
4311
4312  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
4313    {
4314      canon_reg (SET_SRC (x), insn);
4315      apply_change_group ();
4316      fold_rtx (SET_SRC (x), insn);
4317    }
4318  else if (GET_CODE (x) == CLOBBER)
4319    {
4320      /* If we clobber memory, canon the address.
4321	 This does nothing when a register is clobbered
4322	 because we have already invalidated the reg.  */
4323      if (MEM_P (XEXP (x, 0)))
4324	canon_reg (XEXP (x, 0), insn);
4325    }
4326  else if (GET_CODE (x) == USE
4327	   && ! (REG_P (XEXP (x, 0))
4328		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4329    /* Canonicalize a USE of a pseudo register or memory location.  */
4330    canon_reg (x, insn);
4331  else if (GET_CODE (x) == ASM_OPERANDS)
4332    {
4333      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4334	{
4335	  rtx input = ASM_OPERANDS_INPUT (x, i);
4336	  if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
4337	    {
4338	      input = canon_reg (input, insn);
4339	      validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4340	    }
4341	}
4342    }
4343  else if (GET_CODE (x) == CALL)
4344    {
4345      canon_reg (x, insn);
4346      apply_change_group ();
4347      fold_rtx (x, insn);
4348    }
4349  else if (DEBUG_INSN_P (insn))
4350    canon_reg (PATTERN (insn), insn);
4351  else if (GET_CODE (x) == PARALLEL)
4352    {
4353      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4354	{
4355	  rtx y = XVECEXP (x, 0, i);
4356	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
4357	    {
4358	      canon_reg (SET_SRC (y), insn);
4359	      apply_change_group ();
4360	      fold_rtx (SET_SRC (y), insn);
4361	    }
4362	  else if (GET_CODE (y) == CLOBBER)
4363	    {
4364	      if (MEM_P (XEXP (y, 0)))
4365		canon_reg (XEXP (y, 0), insn);
4366	    }
4367	  else if (GET_CODE (y) == USE
4368		   && ! (REG_P (XEXP (y, 0))
4369			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4370	    canon_reg (y, insn);
4371	  else if (GET_CODE (y) == CALL)
4372	    {
4373	      canon_reg (y, insn);
4374	      apply_change_group ();
4375	      fold_rtx (y, insn);
4376	    }
4377	}
4378    }
4379
4380  if (n_sets == 1 && REG_NOTES (insn) != 0
4381      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4382    {
4383      /* We potentially will process this insn many times.  Therefore,
4384	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
4385	 unique set in INSN.
4386
4387	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4388	 because cse_insn handles those specially.  */
4389      if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
4390	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
4391	remove_note (insn, tem);
4392      else
4393	{
4394	  canon_reg (XEXP (tem, 0), insn);
4395	  apply_change_group ();
4396	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
4397	  df_notes_rescan (insn);
4398	}
4399    }
4400
4401  /* Canonicalize sources and addresses of destinations.
4402     We do this in a separate pass to avoid problems when a MATCH_DUP is
4403     present in the insn pattern.  In that case, we want to ensure that
4404     we don't break the duplicate nature of the pattern.  So we will replace
4405     both operands at the same time.  Otherwise, we would fail to find an
4406     equivalent substitution in the loop calling validate_change below.
4407
4408     We used to suppress canonicalization of DEST if it appears in SRC,
4409     but we don't do this any more.  */
4410
4411  for (i = 0; i < n_sets; i++)
4412    {
4413      rtx dest = SET_DEST (sets[i].rtl);
4414      rtx src = SET_SRC (sets[i].rtl);
4415      rtx new_rtx = canon_reg (src, insn);
4416
4417      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4418
4419      if (GET_CODE (dest) == ZERO_EXTRACT)
4420	{
4421	  validate_change (insn, &XEXP (dest, 1),
4422			   canon_reg (XEXP (dest, 1), insn), 1);
4423	  validate_change (insn, &XEXP (dest, 2),
4424			   canon_reg (XEXP (dest, 2), insn), 1);
4425	}
4426
4427      while (GET_CODE (dest) == SUBREG
4428	     || GET_CODE (dest) == ZERO_EXTRACT
4429	     || GET_CODE (dest) == STRICT_LOW_PART)
4430	dest = XEXP (dest, 0);
4431
4432      if (MEM_P (dest))
4433	canon_reg (dest, insn);
4434    }
4435
4436  /* Now that we have done all the replacements, we can apply the change
4437     group and see if they all work.  Note that this will cause some
4438     canonicalizations that would have worked individually not to be applied
4439     because some other canonicalization didn't work, but this should not
4440     occur often.
4441
4442     The result of apply_change_group can be ignored; see canon_reg.  */
4443
4444  apply_change_group ();
4445}
4446
4447/* Main function of CSE.
4448   First simplify sources and addresses of all assignments
4449   in the instruction, using previously-computed equivalent values.
4450   Then install the new sources and destinations in the table
4451   of available values.  */
4452
4453static void
4454cse_insn (rtx insn)
4455{
4456  rtx x = PATTERN (insn);
4457  int i;
4458  rtx tem;
4459  int n_sets = 0;
4460
4461  rtx src_eqv = 0;
4462  struct table_elt *src_eqv_elt = 0;
4463  int src_eqv_volatile = 0;
4464  int src_eqv_in_memory = 0;
4465  unsigned src_eqv_hash = 0;
4466
4467  struct set *sets = (struct set *) 0;
4468
4469  if (GET_CODE (x) == SET)
4470    sets = XALLOCA (struct set);
4471  else if (GET_CODE (x) == PARALLEL)
4472    sets = XALLOCAVEC (struct set, XVECLEN (x, 0));
4473
4474  this_insn = insn;
4475#ifdef HAVE_cc0
4476  /* Records what this insn does to set CC0.  */
4477  this_insn_cc0 = 0;
4478  this_insn_cc0_mode = VOIDmode;
4479#endif
4480
4481  /* Find all regs explicitly clobbered in this insn,
4482     to ensure they are not replaced with any other regs
4483     elsewhere in this insn.  */
4484  invalidate_from_sets_and_clobbers (insn);
4485
4486  /* Record all the SETs in this instruction.  */
4487  n_sets = find_sets_in_insn (insn, &sets);
4488
4489  /* Substitute the canonical register where possible.  */
4490  canonicalize_insn (insn, &sets, n_sets);
4491
4492  /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4493     if different, or if the DEST is a STRICT_LOW_PART.  The latter condition
4494     is necessary because SRC_EQV is handled specially for this case, and if
4495     it isn't set, then there will be no equivalence for the destination.  */
4496  if (n_sets == 1 && REG_NOTES (insn) != 0
4497      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4498      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4499	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4500    src_eqv = copy_rtx (XEXP (tem, 0));
4501
4502  /* Set sets[i].src_elt to the class each source belongs to.
4503     Detect assignments from or to volatile things
4504     and set sets[i] to zero so they will be ignored
4505     in the rest of this function.
4506
4507     Nothing in this loop changes the hash table or the register chains.  */
4508
4509  for (i = 0; i < n_sets; i++)
4510    {
4511      bool repeat = false;
4512      rtx src, dest;
4513      rtx src_folded;
4514      struct table_elt *elt = 0, *p;
4515      enum machine_mode mode;
4516      rtx src_eqv_here;
4517      rtx src_const = 0;
4518      rtx src_related = 0;
4519      bool src_related_is_const_anchor = false;
4520      struct table_elt *src_const_elt = 0;
4521      int src_cost = MAX_COST;
4522      int src_eqv_cost = MAX_COST;
4523      int src_folded_cost = MAX_COST;
4524      int src_related_cost = MAX_COST;
4525      int src_elt_cost = MAX_COST;
4526      int src_regcost = MAX_COST;
4527      int src_eqv_regcost = MAX_COST;
4528      int src_folded_regcost = MAX_COST;
4529      int src_related_regcost = MAX_COST;
4530      int src_elt_regcost = MAX_COST;
4531      /* Set nonzero if we need to call force_const_mem on the
4532	 contents of src_folded before using it.  */
4533      int src_folded_force_flag = 0;
4534
4535      dest = SET_DEST (sets[i].rtl);
4536      src = SET_SRC (sets[i].rtl);
4537
4538      /* If SRC is a constant that has no machine mode,
4539	 hash it with the destination's machine mode.
4540	 This way we can keep different modes separate.  */
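      /* E.g. (const_int 1) itself has VOIDmode; when stored into an
	 SImode register it is hashed as SImode, so it does not share
	 an equivalence class with the same constant stored into a
	 DImode register.  */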
4541
4542      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4543      sets[i].mode = mode;
4544
4545      if (src_eqv)
4546	{
4547	  enum machine_mode eqvmode = mode;
4548	  if (GET_CODE (dest) == STRICT_LOW_PART)
4549	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4550	  do_not_record = 0;
4551	  hash_arg_in_memory = 0;
4552	  src_eqv_hash = HASH (src_eqv, eqvmode);
4553
4554	  /* Find the equivalence class for the equivalent expression.  */
4555
4556	  if (!do_not_record)
4557	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4558
4559	  src_eqv_volatile = do_not_record;
4560	  src_eqv_in_memory = hash_arg_in_memory;
4561	}
4562
4563      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4564	 value of the INNER register, not the destination.  So it is not
4565	 a valid substitution for the source.  But save it for later.  */
4566      if (GET_CODE (dest) == STRICT_LOW_PART)
4567	src_eqv_here = 0;
4568      else
4569	src_eqv_here = src_eqv;
4570
4571      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
4572	 simplified result, which may not necessarily be valid.  */
4573      src_folded = fold_rtx (src, insn);
4574
4575#if 0
4576      /* ??? This caused bad code to be generated for the m68k port with -O2.
4577	 Suppose src is (CONST_INT -1), and that after truncation src_folded
4578	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
4579	 At the end we will add src and src_const to the same equivalence
4580	 class.  We now have 3 and -1 on the same equivalence class.  This
4581	 causes later instructions to be mis-optimized.  */
4582      /* If storing a constant in a bitfield, pre-truncate the constant
4583	 so we will be able to record it later.  */
4584      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4585	{
4586	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4587
4588	  if (CONST_INT_P (src)
4589	      && CONST_INT_P (width)
4590	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4591	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4592	    src_folded
4593	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4594					  << INTVAL (width)) - 1));
4595	}
4596#endif
4597
4598      /* Compute SRC's hash code, and also notice if it
4599	 should not be recorded at all.  In that case,
4600	 prevent any further processing of this assignment.  */
4601      do_not_record = 0;
4602      hash_arg_in_memory = 0;
4603
4604      sets[i].src = src;
4605      sets[i].src_hash = HASH (src, mode);
4606      sets[i].src_volatile = do_not_record;
4607      sets[i].src_in_memory = hash_arg_in_memory;
4608
4609      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4610	 a pseudo, do not record SRC.  Using SRC as a replacement for
4611	 anything else will be incorrect in that situation.  Note that
4612	 this usually occurs only for stack slots, in which case all the
4613	 RTL would be referring to SRC, so we don't lose any optimization
4614	 opportunities by not having SRC in the hash table.  */
4615
4616      if (MEM_P (src)
4617	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4618	  && REG_P (dest)
4619	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4620	sets[i].src_volatile = 1;
4621
4622#if 0
4623      /* It is no longer clear why we used to do this, but it doesn't
4624	 appear to still be needed.  So let's try without it since this
4625	 code hurts cse'ing widened ops.  */
4626      /* If source is a paradoxical subreg (such as QI treated as an SI),
4627	 treat it as volatile.  It may do the work of an SI in one context
4628	 where the extra bits are not being used, but cannot replace an SI
4629	 in general.  */
4630      if (paradoxical_subreg_p (src))
4631	sets[i].src_volatile = 1;
4632#endif
4633
4634      /* Locate all possible equivalent forms for SRC.  Try to replace
4635         SRC in the insn with each cheaper equivalent.
4636
4637         We have the following types of equivalents: SRC itself, a folded
4638         version, a value given in a REG_EQUAL note, or a value related
4639	 to a constant.
4640
4641         Each of these equivalents may be part of an additional class
4642         of equivalents (if more than one is in the table, they must be in
4643         the same class; we check for this).
4644
4645	 If the source is volatile, we don't do any table lookups.
4646
4647         We note any constant equivalent for possible later use in a
4648         REG_NOTE.  */
4649
4650      if (!sets[i].src_volatile)
4651	elt = lookup (src, sets[i].src_hash, mode);
4652
4653      sets[i].src_elt = elt;
4654
4655      if (elt && src_eqv_here && src_eqv_elt)
4656	{
4657	  if (elt->first_same_value != src_eqv_elt->first_same_value)
4658	    {
4659	      /* The REG_EQUAL is indicating that two formerly distinct
4660		 classes are now equivalent.  So merge them.  */
4661	      merge_equiv_classes (elt, src_eqv_elt);
4662	      src_eqv_hash = HASH (src_eqv, elt->mode);
4663	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4664	    }
4665
4666	  src_eqv_here = 0;
4667	}
4668
4669      else if (src_eqv_elt)
4670	elt = src_eqv_elt;
4671
4672      /* Try to find a constant somewhere and record it in `src_const'.
4673	 Record its table element, if any, in `src_const_elt'.  Look in
4674	 any known equivalences first.  (If the constant is not in the
4675	 table, also set `sets[i].src_const_hash').  */
4676      if (elt)
4677	for (p = elt->first_same_value; p; p = p->next_same_value)
4678	  if (p->is_const)
4679	    {
4680	      src_const = p->exp;
4681	      src_const_elt = elt;
4682	      break;
4683	    }
4684
4685      if (src_const == 0
4686	  && (CONSTANT_P (src_folded)
4687	      /* Consider (minus (label_ref L1) (label_ref L2)) as
4688		 "constant" here so we will record it. This allows us
4689		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
4690	      || (GET_CODE (src_folded) == MINUS
4691		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4692		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4693	src_const = src_folded, src_const_elt = elt;
4694      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4695	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4696
4697      /* If we don't know if the constant is in the table, get its
4698	 hash code and look it up.  */
4699      if (src_const && src_const_elt == 0)
4700	{
4701	  sets[i].src_const_hash = HASH (src_const, mode);
4702	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4703	}
4704
4705      sets[i].src_const = src_const;
4706      sets[i].src_const_elt = src_const_elt;
4707
4708      /* If the constant and our source are both in the table, mark them as
4709	 equivalent.  Otherwise, if a constant is in the table but the source
4710	 isn't, set ELT to it.  */
4711      if (src_const_elt && elt
4712	  && src_const_elt->first_same_value != elt->first_same_value)
4713	merge_equiv_classes (elt, src_const_elt);
4714      else if (src_const_elt && elt == 0)
4715	elt = src_const_elt;
4716
4717      /* See if there is a register linearly related to a constant
4718         equivalent of SRC.  */
4719      if (src_const
4720	  && (GET_CODE (src_const) == CONST
4721	      || (src_const_elt && src_const_elt->related_value != 0)))
4722	{
4723	  src_related = use_related_value (src_const, src_const_elt);
4724	  if (src_related)
4725	    {
4726	      struct table_elt *src_related_elt
4727		= lookup (src_related, HASH (src_related, mode), mode);
4728	      if (src_related_elt && elt)
4729		{
4730		  if (elt->first_same_value
4731		      != src_related_elt->first_same_value)
4732		    /* This can occur when we previously saw a CONST
4733		       involving a SYMBOL_REF and then see the SYMBOL_REF
4734		       twice.  Merge the involved classes.  */
4735		    merge_equiv_classes (elt, src_related_elt);
4736
4737		  src_related = 0;
4738		  src_related_elt = 0;
4739		}
4740	      else if (src_related_elt && elt == 0)
4741		elt = src_related_elt;
4742	    }
4743	}
4744
4745      /* See if we have a CONST_INT that is already in a register in a
4746	 wider mode.  */
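      /* A sketch (modes and registers hypothetical): if (reg:SI 100)
	 is already known to hold (const_int 7) and this set needs
	 (const_int 7) in HImode, gen_lowpart can give us
	 (subreg:HI (reg:SI 100) 0) as a ready-made source.  */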
4747
4748      if (src_const && src_related == 0 && CONST_INT_P (src_const)
4749	  && GET_MODE_CLASS (mode) == MODE_INT
4750	  && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
4751	{
4752	  enum machine_mode wider_mode;
4753
4754	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
4755	       wider_mode != VOIDmode
4756	       && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
4757	       && src_related == 0;
4758	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4759	    {
4760	      struct table_elt *const_elt
4761		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4762
4763	      if (const_elt == 0)
4764		continue;
4765
4766	      for (const_elt = const_elt->first_same_value;
4767		   const_elt; const_elt = const_elt->next_same_value)
4768		if (REG_P (const_elt->exp))
4769		  {
4770		    src_related = gen_lowpart (mode, const_elt->exp);
4771		    break;
4772		  }
4773	    }
4774	}
4775
4776      /* Another possibility is that we have an AND with a constant in
4777	 a mode narrower than a word.  If so, it might have been generated
4778	 as part of an "if" which would narrow the AND.  If we already
4779	 have done the AND in a wider mode, we can use a SUBREG of that
4780	 value.  */
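      /* E.g. (illustrative): if (and:SI (reg:SI 100) (const_int 255))
	 was already computed into (reg:SI 101), then a QImode AND of
	 the corresponding low part with (const_int 255) can use
	 (subreg:QI (reg:SI 101) 0) instead of redoing the AND.  */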
4781
4782      if (flag_expensive_optimizations && ! src_related
4783	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4784	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4785	{
4786	  enum machine_mode tmode;
4787	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4788
4789	  for (tmode = GET_MODE_WIDER_MODE (mode);
4790	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4791	       tmode = GET_MODE_WIDER_MODE (tmode))
4792	    {
4793	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4794	      struct table_elt *larger_elt;
4795
4796	      if (inner)
4797		{
4798		  PUT_MODE (new_and, tmode);
4799		  XEXP (new_and, 0) = inner;
4800		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4801		  if (larger_elt == 0)
4802		    continue;
4803
4804		  for (larger_elt = larger_elt->first_same_value;
4805		       larger_elt; larger_elt = larger_elt->next_same_value)
4806		    if (REG_P (larger_elt->exp))
4807		      {
4808			src_related
4809			  = gen_lowpart (mode, larger_elt->exp);
4810			break;
4811		      }
4812
4813		  if (src_related)
4814		    break;
4815		}
4816	    }
4817	}
4818
4819#ifdef LOAD_EXTEND_OP
4820      /* See if a MEM has already been loaded with a widening operation;
4821	 if it has, we can use a subreg of that.  Many CISC machines
4822	 also have such operations, but this is only likely to be
4823	 beneficial on machines that define LOAD_EXTEND_OP.  */
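      /* A sketch: on a target where LOAD_EXTEND_OP (QImode) is
	 ZERO_EXTEND, once (zero_extend:SI (mem:QI addr)) has been
	 loaded into (reg:SI 100), a later QImode read of the same
	 address can use (subreg:QI (reg:SI 100) 0) instead.
	 (Register number hypothetical.)  */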
4824
4825      if (flag_expensive_optimizations && src_related == 0
4826	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4827	  && GET_MODE_CLASS (mode) == MODE_INT
4828	  && MEM_P (src) && ! do_not_record
4829	  && LOAD_EXTEND_OP (mode) != UNKNOWN)
4830	{
4831	  struct rtx_def memory_extend_buf;
4832	  rtx memory_extend_rtx = &memory_extend_buf;
4833	  enum machine_mode tmode;
4834
4835	  /* Set what we are trying to extend and the operation it might
4836	     have been extended with.  */
4837	  memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
4838	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4839	  XEXP (memory_extend_rtx, 0) = src;
4840
4841	  for (tmode = GET_MODE_WIDER_MODE (mode);
4842	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4843	       tmode = GET_MODE_WIDER_MODE (tmode))
4844	    {
4845	      struct table_elt *larger_elt;
4846
4847	      PUT_MODE (memory_extend_rtx, tmode);
4848	      larger_elt = lookup (memory_extend_rtx,
4849				   HASH (memory_extend_rtx, tmode), tmode);
4850	      if (larger_elt == 0)
4851		continue;
4852
4853	      for (larger_elt = larger_elt->first_same_value;
4854		   larger_elt; larger_elt = larger_elt->next_same_value)
4855		if (REG_P (larger_elt->exp))
4856		  {
4857		    src_related = gen_lowpart (mode, larger_elt->exp);
4858		    break;
4859		  }
4860
4861	      if (src_related)
4862		break;
4863	    }
4864	}
4865#endif /* LOAD_EXTEND_OP */
4866
4867      /* Try to express the constant using a register+offset expression
4868	 derived from a constant anchor.  */
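      /* E.g. (values illustrative): with a const-anchor spacing of
	 0x8000, if (reg 100) is known to hold (const_int 0x30000),
	 the constant 0x30012 may be synthesized as
	 (plus (reg 100) (const_int 0x12)).  */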
4869
4870      if (targetm.const_anchor
4871	  && !src_related
4872	  && src_const
4873	  && GET_CODE (src_const) == CONST_INT)
4874	{
4875	  src_related = try_const_anchors (src_const, mode);
4876	  src_related_is_const_anchor = src_related != NULL_RTX;
4877	}
4878
4879
4880      if (src == src_folded)
4881	src_folded = 0;
4882
4883      /* At this point, ELT, if nonzero, points to a class of expressions
4884         equivalent to the source of this SET, while SRC, SRC_EQV,
4885	 SRC_FOLDED, and SRC_RELATED, if nonzero, each contain additional
4886	 equivalent expressions.  Prune these latter expressions by deleting
4887	 any already in the equivalence class.
4888
4889	 Check for an equivalent identical to the destination.  If found,
4890	 this is the preferred equivalent since it will likely lead to
4891	 elimination of the insn.  Indicate this by placing it in
4892	 `src_related'.  */
4893
4894      if (elt)
4895	elt = elt->first_same_value;
4896      for (p = elt; p; p = p->next_same_value)
4897	{
4898	  enum rtx_code code = GET_CODE (p->exp);
4899
4900	  /* If the expression is not valid, ignore it.  Then we do not
4901	     have to check for validity below.  In most cases, we can use
4902	     `rtx_equal_p', since canonicalization has already been done.  */
4903	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4904	    continue;
4905
4906	  /* Also skip paradoxical subregs, unless that's what we're
4907	     looking for.  */
4908	  if (paradoxical_subreg_p (p->exp)
4909	      && ! (src != 0
4910		    && GET_CODE (src) == SUBREG
4911		    && GET_MODE (src) == GET_MODE (p->exp)
4912		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4913			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4914	    continue;
4915
4916	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4917	    src = 0;
4918	  else if (src_folded && GET_CODE (src_folded) == code
4919		   && rtx_equal_p (src_folded, p->exp))
4920	    src_folded = 0;
4921	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4922		   && rtx_equal_p (src_eqv_here, p->exp))
4923	    src_eqv_here = 0;
4924	  else if (src_related && GET_CODE (src_related) == code
4925		   && rtx_equal_p (src_related, p->exp))
4926	    src_related = 0;
4927
4928	  /* If this is the same as the destination of the insn, we want
4929	     to prefer it.  Copy it to src_related.  The code below will
4930	     then give it a negative cost.  */
4931	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4932	    src_related = dest;
4933	}
4934
4935      /* Find the cheapest valid equivalent, trying all the available
4936         possibilities.  Prefer items not in the hash table to ones
4937         that are when their costs are equal.  Note that we can never
4938         worsen an insn as the current contents will also succeed.
4939	 If we find an equivalent identical to the destination, use it as best,
4940	 since this insn will probably be eliminated in that case.  */
4941      if (src)
4942	{
4943	  if (rtx_equal_p (src, dest))
4944	    src_cost = src_regcost = -1;
4945	  else
4946	    {
4947	      src_cost = COST (src);
4948	      src_regcost = approx_reg_cost (src);
4949	    }
4950	}
4951
4952      if (src_eqv_here)
4953	{
4954	  if (rtx_equal_p (src_eqv_here, dest))
4955	    src_eqv_cost = src_eqv_regcost = -1;
4956	  else
4957	    {
4958	      src_eqv_cost = COST (src_eqv_here);
4959	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
4960	    }
4961	}
4962
4963      if (src_folded)
4964	{
4965	  if (rtx_equal_p (src_folded, dest))
4966	    src_folded_cost = src_folded_regcost = -1;
4967	  else
4968	    {
4969	      src_folded_cost = COST (src_folded);
4970	      src_folded_regcost = approx_reg_cost (src_folded);
4971	    }
4972	}
4973
4974      if (src_related)
4975	{
4976	  if (rtx_equal_p (src_related, dest))
4977	    src_related_cost = src_related_regcost = -1;
4978	  else
4979	    {
4980	      src_related_cost = COST (src_related);
4981	      src_related_regcost = approx_reg_cost (src_related);
4982
4983	      /* If a const-anchor is used to synthesize a constant that
4984		 normally requires multiple instructions then slightly prefer
4985		 it over the original sequence.  These instructions are likely
4986		 to become redundant now.  We can't compare against the cost
4987		 of src_eqv_here because, on MIPS for example, multi-insn
4988		 constants have zero cost; they are assumed to be hoisted from
4989		 loops.  */
4990	      if (src_related_is_const_anchor
4991		  && src_related_cost == src_cost
4992		  && src_eqv_here)
4993		src_related_cost--;
4994	    }
4995	}
4996
4997      /* If this was an indirect jump insn, a known label will really be
4998	 cheaper even though it looks more expensive.  */
4999      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5000	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5001
5002      /* Terminate the loop when a replacement is made.  This must terminate
5003         since the current contents will be tested and will always be valid.  */
5004      while (1)
5005	{
5006	  rtx trial;
5007
5008	  /* Skip invalid entries.  */
5009	  while (elt && !REG_P (elt->exp)
5010		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5011	    elt = elt->next_same_value;
5012
5013	  /* A paradoxical subreg would be bad here: it'll be the right
5014	     size, but later may be adjusted so that the upper bits aren't
5015	     what we want.  So reject it.  */
5016	  if (elt != 0
5017	      && paradoxical_subreg_p (elt->exp)
5018	      /* It is okay, though, if the rtx we're trying to match
5019		 will ignore any of the bits we can't predict.  */
5020	      && ! (src != 0
5021		    && GET_CODE (src) == SUBREG
5022		    && GET_MODE (src) == GET_MODE (elt->exp)
5023		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5024			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5025	    {
5026	      elt = elt->next_same_value;
5027	      continue;
5028	    }
5029
5030	  if (elt)
5031	    {
5032	      src_elt_cost = elt->cost;
5033	      src_elt_regcost = elt->regcost;
5034	    }
5035
5036	  /* Find the cheapest and arrange to skip it next time.  For items
5037	     of equal cost, use this order:
5038	     src_folded, src, src_eqv, src_related and hash table entry.  */
5039	  if (src_folded
5040	      && preferable (src_folded_cost, src_folded_regcost,
5041			     src_cost, src_regcost) <= 0
5042	      && preferable (src_folded_cost, src_folded_regcost,
5043			     src_eqv_cost, src_eqv_regcost) <= 0
5044	      && preferable (src_folded_cost, src_folded_regcost,
5045			     src_related_cost, src_related_regcost) <= 0
5046	      && preferable (src_folded_cost, src_folded_regcost,
5047			     src_elt_cost, src_elt_regcost) <= 0)
5048	    {
5049	      trial = src_folded, src_folded_cost = MAX_COST;
5050	      if (src_folded_force_flag)
5051		{
5052		  rtx forced = force_const_mem (mode, trial);
5053		  if (forced)
5054		    trial = forced;
5055		}
5056	    }
5057	  else if (src
5058		   && preferable (src_cost, src_regcost,
5059				  src_eqv_cost, src_eqv_regcost) <= 0
5060		   && preferable (src_cost, src_regcost,
5061				  src_related_cost, src_related_regcost) <= 0
5062		   && preferable (src_cost, src_regcost,
5063				  src_elt_cost, src_elt_regcost) <= 0)
5064	    trial = src, src_cost = MAX_COST;
5065	  else if (src_eqv_here
5066		   && preferable (src_eqv_cost, src_eqv_regcost,
5067				  src_related_cost, src_related_regcost) <= 0
5068		   && preferable (src_eqv_cost, src_eqv_regcost,
5069				  src_elt_cost, src_elt_regcost) <= 0)
5070	    trial = src_eqv_here, src_eqv_cost = MAX_COST;
5071	  else if (src_related
5072		   && preferable (src_related_cost, src_related_regcost,
5073				  src_elt_cost, src_elt_regcost) <= 0)
5074	    trial = src_related, src_related_cost = MAX_COST;
5075	  else
5076	    {
5077	      trial = elt->exp;
5078	      elt = elt->next_same_value;
5079	      src_elt_cost = MAX_COST;
5080	    }
5081
5082	  /* Avoid creation of overlapping memory moves.  */
5083	  if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
5084	    {
5085	      rtx src, dest;
5086
5087	      /* BLKmode moves are not handled by cse anyway.  */
5088	      if (GET_MODE (trial) == BLKmode)
5089		break;
5090
5091	      src = canon_rtx (trial);
5092	      dest = canon_rtx (SET_DEST (sets[i].rtl));
5093
5094	      if (!MEM_P (src) || !MEM_P (dest)
5095		  || !nonoverlapping_memrefs_p (src, dest, false))
5096		break;
5097	    }
5098
5099	  /* Try to optimize
5100	     (set (reg:M N) (const_int A))
5101	     (set (reg:M2 O) (const_int B))
5102	     (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5103		  (reg:M2 O)).  */
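	  /* A hypothetical worked case: with (set (reg:SI 100)
	     (const_int 0x1234)) recorded and the insn
	     (set (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 8))
		  (const_int 0xff)),
	     A = 0x1234, B = 0xff, C = 8 and D = 8.  On a little-endian
	     target, shift = 8 and mask = 0xff, so
	     val = (0x1234 & ~0xff00) | (0xff << 8) = 0xff34, and the insn
	     becomes (set (reg:SI 100) (const_int 0xff34)).  */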
5104	  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5105	      && CONST_INT_P (trial)
5106	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5107	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5108	      && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5109	      && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
5110		  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5111	      && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5112		  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5113		  <= HOST_BITS_PER_WIDE_INT))
5114	    {
5115	      rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5116	      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5117	      rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5118	      unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5119	      struct table_elt *dest_elt
5120		= lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5121	      rtx dest_cst = NULL;
5122
5123	      if (dest_elt)
5124		for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5125		  if (p->is_const && CONST_INT_P (p->exp))
5126		    {
5127		      dest_cst = p->exp;
5128		      break;
5129		    }
5130	      if (dest_cst)
5131		{
5132		  HOST_WIDE_INT val = INTVAL (dest_cst);
5133		  HOST_WIDE_INT mask;
5134		  unsigned int shift;
5135		  if (BITS_BIG_ENDIAN)
5136		    shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
5137			    - INTVAL (pos) - INTVAL (width);
5138		  else
5139		    shift = INTVAL (pos);
5140		  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5141		    mask = ~(HOST_WIDE_INT) 0;
5142		  else
5143		    mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5144		  val &= ~(mask << shift);
5145		  val |= (INTVAL (trial) & mask) << shift;
5146		  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5147		  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5148					   dest_reg, 1);
5149		  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5150					   GEN_INT (val), 1);
5151		  if (apply_change_group ())
5152		    {
5153		      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5154		      if (note)
5155			{
5156			  remove_note (insn, note);
5157			  df_notes_rescan (insn);
5158			}
5159		      src_eqv = NULL_RTX;
5160		      src_eqv_elt = NULL;
5161		      src_eqv_volatile = 0;
5162		      src_eqv_in_memory = 0;
5163		      src_eqv_hash = 0;
5164		      repeat = true;
5165		      break;
5166		    }
5167		}
5168	    }
5169
5170	  /* We don't normally have an insn matching (set (pc) (pc)), so
5171	     check for this separately here.  We will delete such an
5172	     insn below.
5173
5174	     For other cases such as a table jump or conditional jump
5175	     where we know the ultimate target, go ahead and replace the
5176	     operand.  While that may not make a valid insn, we will
5177	     reemit the jump below (and also insert any necessary
5178	     barriers).  */
5179	  if (n_sets == 1 && dest == pc_rtx
5180	      && (trial == pc_rtx
5181		  || (GET_CODE (trial) == LABEL_REF
5182		      && ! condjump_p (insn))))
5183	    {
5184	      /* Don't substitute non-local labels, this confuses CFG.  */
5185	      if (GET_CODE (trial) == LABEL_REF
5186		  && LABEL_REF_NONLOCAL_P (trial))
5187		continue;
5188
5189	      SET_SRC (sets[i].rtl) = trial;
5190	      cse_jumps_altered = true;
5191	      break;
5192	    }
5193
5194	  /* Reject certain invalid forms of CONST that we create.  */
5195	  else if (CONSTANT_P (trial)
5196		   && GET_CODE (trial) == CONST
5197		   /* Reject cases that will cause decode_rtx_const to
5198		      die.  On the alpha when simplifying a switch, we
5199		      get (const (truncate (minus (label_ref)
5200		      (label_ref)))).  */
5201		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5202		       /* Likewise on IA-64, except without the
5203			  truncate.  */
5204		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5205			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5206			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5207	    /* Do nothing for this case.  */
5208	    ;
5209
5210	  /* Look for a substitution that makes a valid insn.  */
5211	  else if (validate_unshare_change
5212		     (insn, &SET_SRC (sets[i].rtl), trial, 0))
5213	    {
5214	      rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5215
5216	      /* The result of apply_change_group can be ignored; see
5217		 canon_reg.  */
5218
5219	      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5220	      apply_change_group ();
5221
5222	      break;
5223	    }
5224
5225	  /* If we previously found constant pool entries for
5226	     constants and this is a constant, try making a
5227	     pool entry.  Put it in src_folded unless we have already done
5228	     this, since that is where it likely came from.  */
5229
5230	  else if (constant_pool_entries_cost
5231		   && CONSTANT_P (trial)
5232		   && (src_folded == 0
5233		       || (!MEM_P (src_folded)
5234			   && ! src_folded_force_flag))
5235		   && GET_MODE_CLASS (mode) != MODE_CC
5236		   && mode != VOIDmode)
5237	    {
5238	      src_folded_force_flag = 1;
5239	      src_folded = trial;
5240	      src_folded_cost = constant_pool_entries_cost;
5241	      src_folded_regcost = constant_pool_entries_regcost;
5242	    }
5243	}
5244
5245      /* If we changed the insn too much, handle this set from scratch.  */
5246      if (repeat)
5247	{
5248	  i--;
5249	  continue;
5250	}
5251
5252      src = SET_SRC (sets[i].rtl);
5253
5254      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5255	 However, there is an important exception:  If both are registers
5256	 that are not the head of their equivalence class, replace SET_SRC
5257	 with the head of the class.  If we do not do this, we will have
5258	 both registers live over a portion of the basic block.  This way,
5259	 their lifetimes will likely abut instead of overlapping.  */
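      /* For example, if (reg 101) and (reg 102) are equivalent and
	 (reg 101) heads the class, an insn that has become
	 (set (reg 102) (reg 102)) is rewritten as
	 (set (reg 102) (reg 101)), so (reg 102) need not stay live past
	 this point.  (Hypothetical register numbers.)  */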
5260      if (REG_P (dest)
5261	  && REGNO_QTY_VALID_P (REGNO (dest)))
5262	{
5263	  int dest_q = REG_QTY (REGNO (dest));
5264	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5265
5266	  if (dest_ent->mode == GET_MODE (dest)
5267	      && dest_ent->first_reg != REGNO (dest)
5268	      && REG_P (src) && REGNO (src) == REGNO (dest)
5269	      /* Don't do this if the original insn had a hard reg as
5270		 SET_SRC or SET_DEST.  */
5271	      && (!REG_P (sets[i].src)
5272		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5273	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5274	    /* We can't call canon_reg here because it won't do anything if
5275	       SRC is a hard register.  */
5276	    {
5277	      int src_q = REG_QTY (REGNO (src));
5278	      struct qty_table_elem *src_ent = &qty_table[src_q];
5279	      int first = src_ent->first_reg;
5280	      rtx new_src
5281		= (first >= FIRST_PSEUDO_REGISTER
5282		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5283
5284	      /* We must use validate-change even for this, because this
5285		 might be a special no-op instruction, suitable only to
5286		 tag notes onto.  */
5287	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5288		{
5289		  src = new_src;
5290		  /* If we had a constant that is cheaper than what we are now
5291		     setting SRC to, use that constant.  We ignored it when we
5292		     thought we could make this into a no-op.  */
5293		  if (src_const && COST (src_const) < COST (src)
5294		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5295					  src_const, 0))
5296		    src = src_const;
5297		}
5298	    }
5299	}
5300
5301      /* If we made a change, recompute SRC values.  */
5302      if (src != sets[i].src)
5303	{
5304	  do_not_record = 0;
5305	  hash_arg_in_memory = 0;
5306	  sets[i].src = src;
5307	  sets[i].src_hash = HASH (src, mode);
5308	  sets[i].src_volatile = do_not_record;
5309	  sets[i].src_in_memory = hash_arg_in_memory;
5310	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5311	}
5312
5313      /* If this is a single SET, we are setting a register, and we have an
5314	 equivalent constant, we want to add a REG_EQUAL note if the constant
5315	 is different from the source.  We don't want to do it for a constant
5316	 pseudo since verifying that this pseudo hasn't been eliminated is a
5317	 pain; moreover such a note won't help anything.
5318
5319	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5320	 which can be created for a reference to a compile time computable
5321	 entry in a jump table.  */
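      /* For example, for (set (reg 100) (plus (reg 101) (const_int 4)))
	 where (reg 101) is known to hold (const_int 8), this attaches
	 REG_EQUAL (const_int 12) to the insn, recording the value without
	 changing the insn itself.  (Hypothetical values.)  */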
5322      if (n_sets == 1
5323	  && REG_P (dest)
5324	  && src_const
5325	  && !REG_P (src_const)
5326	  && !(GET_CODE (src_const) == SUBREG
5327	       && REG_P (SUBREG_REG (src_const)))
5328	  && !(GET_CODE (src_const) == CONST
5329	       && GET_CODE (XEXP (src_const, 0)) == MINUS
5330	       && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5331	       && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5332	  && !rtx_equal_p (src, src_const))
5333	{
5334	  /* Make sure that the rtx is not shared.  */
5335	  src_const = copy_rtx (src_const);
5336
5337	  /* Record the actual constant value in a REG_EQUAL note,
5338	     making a new one if one does not already exist.  */
5339	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5340	  df_notes_rescan (insn);
5341	}
5342
5343      /* Now deal with the destination.  */
5344      do_not_record = 0;
5345
5346      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5347      while (GET_CODE (dest) == SUBREG
5348	     || GET_CODE (dest) == ZERO_EXTRACT
5349	     || GET_CODE (dest) == STRICT_LOW_PART)
5350	dest = XEXP (dest, 0);
5351
5352      sets[i].inner_dest = dest;
5353
5354      if (MEM_P (dest))
5355	{
5356#ifdef PUSH_ROUNDING
5357	  /* Stack pushes invalidate the stack pointer.  */
5358	  rtx addr = XEXP (dest, 0);
5359	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5360	      && XEXP (addr, 0) == stack_pointer_rtx)
5361	    invalidate (stack_pointer_rtx, VOIDmode);
5362#endif
5363	  dest = fold_rtx (dest, insn);
5364	}
5365
5366      /* Compute the hash code of the destination now,
5367	 before the effects of this instruction are recorded,
5368	 since the register values used in the address computation
5369	 are those before this instruction.  */
5370      sets[i].dest_hash = HASH (dest, mode);
5371
5372      /* Don't enter a bit-field in the hash table
5373	 because the value in it after the store
5374	 may not equal what was stored, due to truncation.  */
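      /* For example, storing (const_int 0x1ff) through an 8-bit
	 ZERO_EXTRACT leaves the field holding 0xff, so recording the
	 stored rtx as the field's value would be wrong.  */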
5375
5376      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5377	{
5378	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5379
5380	  if (src_const != 0 && CONST_INT_P (src_const)
5381	      && CONST_INT_P (width)
5382	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5383	      && ! (INTVAL (src_const)
5384		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5385	    /* Exception: if the value is constant,
5386	       and it won't be truncated, record it.  */
5387	    ;
5388	  else
5389	    {
5390	      /* This is chosen so that the destination will be invalidated
5391		 but no new value will be recorded.
5392		 We must invalidate because sometimes constant
5393		 values can be recorded for bitfields.  */
5394	      sets[i].src_elt = 0;
5395	      sets[i].src_volatile = 1;
5396	      src_eqv = 0;
5397	      src_eqv_elt = 0;
5398	    }
5399	}
5400
5401      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5402	 the insn.  */
5403      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5404	{
5405	  /* One less use of the label this insn used to jump to.  */
5406	  delete_insn_and_edges (insn);
5407	  cse_jumps_altered = true;
5408	  /* No more processing for this set.  */
5409	  sets[i].rtl = 0;
5410	}
5411
5412      /* If this SET is now setting PC to a label, we know it used to
5413	 be a conditional or computed branch.  */
5414      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5415	       && !LABEL_REF_NONLOCAL_P (src))
5416	{
5417	  /* We reemit the jump in as many cases as possible just in
5418	     case the form of an unconditional jump is significantly
5419	     different from that of a computed or conditional jump.
5420
5421	     If this insn has multiple sets, then reemitting the
5422	     jump is nontrivial.  So instead we just force rerecognition
5423	     and hope for the best.  */
5424	  if (n_sets == 1)
5425	    {
5426	      rtx new_rtx, note;
5427
5428	      new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5429	      JUMP_LABEL (new_rtx) = XEXP (src, 0);
5430	      LABEL_NUSES (XEXP (src, 0))++;
5431
5432	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5433	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5434	      if (note)
5435		{
5436		  XEXP (note, 1) = NULL_RTX;
5437		  REG_NOTES (new_rtx) = note;
5438		}
5439
5440	      delete_insn_and_edges (insn);
5441	      insn = new_rtx;
5442	    }
5443	  else
5444	    INSN_CODE (insn) = -1;
5445
5446	  /* Do not bother deleting any unreachable code, let jump do it.  */
5447	  cse_jumps_altered = true;
5448	  sets[i].rtl = 0;
5449	}
5450
5451      /* If destination is volatile, invalidate it and then do no further
5452	 processing for this assignment.  */
5453
5454      else if (do_not_record)
5455	{
5456	  if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5457	    invalidate (dest, VOIDmode);
5458	  else if (MEM_P (dest))
5459	    invalidate (dest, VOIDmode);
5460	  else if (GET_CODE (dest) == STRICT_LOW_PART
5461		   || GET_CODE (dest) == ZERO_EXTRACT)
5462	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5463	  sets[i].rtl = 0;
5464	}
5465
5466      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5467	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5468
5469#ifdef HAVE_cc0
5470      /* If setting CC0, record what it was set to, or a constant, if it
5471	 is equivalent to a constant.  If it is being set to a floating-point
5472	 value, make a COMPARE with the appropriate constant of 0.  If we
5473	 don't do this, later code can interpret this as a test against
5474	 const0_rtx, which can cause problems if we try to put it into an
5475	 insn as a floating-point operand.  */
5476      if (dest == cc0_rtx)
5477	{
5478	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5479	  this_insn_cc0_mode = mode;
5480	  if (FLOAT_MODE_P (mode))
5481	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5482					     CONST0_RTX (mode));
5483	}
5484#endif
5485    }
5486
5487  /* Now enter all non-volatile source expressions in the hash table
5488     if they are not already present.
5489     Record their equivalence classes in src_elt.
5490     This way we can insert the corresponding destinations into
5491     the same classes even if the actual sources are no longer in them
5492     (having been invalidated).  */
5493
5494  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5495      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5496    {
5497      struct table_elt *elt;
5498      struct table_elt *classp = sets[0].src_elt;
5499      rtx dest = SET_DEST (sets[0].rtl);
5500      enum machine_mode eqvmode = GET_MODE (dest);
5501
5502      if (GET_CODE (dest) == STRICT_LOW_PART)
5503	{
5504	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5505	  classp = 0;
5506	}
5507      if (insert_regs (src_eqv, classp, 0))
5508	{
5509	  rehash_using_reg (src_eqv);
5510	  src_eqv_hash = HASH (src_eqv, eqvmode);
5511	}
5512      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5513      elt->in_memory = src_eqv_in_memory;
5514      src_eqv_elt = elt;
5515
5516      /* Check to see if src_eqv_elt is the same as a set source which
5517	 does not yet have an elt, and if so set the elt of the set source
5518	 to src_eqv_elt.  */
5519      for (i = 0; i < n_sets; i++)
5520	if (sets[i].rtl && sets[i].src_elt == 0
5521	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5522	  sets[i].src_elt = src_eqv_elt;
5523    }
5524
5525  for (i = 0; i < n_sets; i++)
5526    if (sets[i].rtl && ! sets[i].src_volatile
5527	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5528      {
5529	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5530	  {
5531	    /* REG_EQUAL in setting a STRICT_LOW_PART
5532	       gives an equivalent for the entire destination register,
5533	       not just for the subreg being stored in now.
5534	       This is a more interesting equivalence, so we arrange later
5535	       to treat the entire reg as the destination.  */
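	    /* For example, for
	       (set (strict_low_part (subreg:QI (reg:SI 100) 0)) (reg:QI 101))
	       a REG_EQUAL note describes the value of all of (reg:SI 100)
	       after the store, not just its low byte.
	       (Hypothetical registers and modes.)  */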
5536	    sets[i].src_elt = src_eqv_elt;
5537	    sets[i].src_hash = src_eqv_hash;
5538	  }
5539	else
5540	  {
5541	    /* Insert source and constant equivalent into hash table, if not
5542	       already present.  */
5543	    struct table_elt *classp = src_eqv_elt;
5544	    rtx src = sets[i].src;
5545	    rtx dest = SET_DEST (sets[i].rtl);
5546	    enum machine_mode mode
5547	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5548
5549	    /* It's possible that we have a source value known to be
5550	       constant but don't have a REG_EQUAL note on the insn.
5551	       Lack of a note will mean src_eqv_elt will be NULL.  This
5552	       can happen where we've generated a SUBREG to access a
5553	       CONST_INT that is already in a register in a wider mode.
5554	       Ensure that the source expression is put in the proper
5555	       constant class.  */
5556	    if (!classp)
5557	      classp = sets[i].src_const_elt;
5558
5559	    if (sets[i].src_elt == 0)
5560	      {
5561		struct table_elt *elt;
5562
5563		/* Note that these insert_regs calls cannot remove
5564		   any of the src_elt's, because they would have failed to
5565		   match if not still valid.  */
5566		if (insert_regs (src, classp, 0))
5567		  {
5568		    rehash_using_reg (src);
5569		    sets[i].src_hash = HASH (src, mode);
5570		  }
5571		elt = insert (src, classp, sets[i].src_hash, mode);
5572		elt->in_memory = sets[i].src_in_memory;
5573		sets[i].src_elt = classp = elt;
5574	      }
5575	    if (sets[i].src_const && sets[i].src_const_elt == 0
5576		&& src != sets[i].src_const
5577		&& ! rtx_equal_p (sets[i].src_const, src))
5578	      sets[i].src_elt = insert (sets[i].src_const, classp,
5579					sets[i].src_const_hash, mode);
5580	  }
5581      }
5582    else if (sets[i].src_elt == 0)
5583      /* If we did not insert the source into the hash table (e.g., it was
5584	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5585	 so that the destination goes into that class.  */
5586      sets[i].src_elt = src_eqv_elt;
5587
5588  /* Record destination addresses in the hash table.  This allows us to
5589     check if they are invalidated by other sets.  */
5590  for (i = 0; i < n_sets; i++)
5591    {
5592      if (sets[i].rtl)
5593	{
5594	  rtx x = sets[i].inner_dest;
5595	  struct table_elt *elt;
5596	  enum machine_mode mode;
5597	  unsigned hash;
5598
5599	  if (MEM_P (x))
5600	    {
5601	      x = XEXP (x, 0);
5602	      mode = GET_MODE (x);
5603	      hash = HASH (x, mode);
5604	      elt = lookup (x, hash, mode);
5605	      if (!elt)
5606		{
5607		  if (insert_regs (x, NULL, 0))
5608		    {
5609		      rtx dest = SET_DEST (sets[i].rtl);
5610
5611		      rehash_using_reg (x);
5612		      hash = HASH (x, mode);
5613		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5614		    }
5615		  elt = insert (x, NULL, hash, mode);
5616		}
5617
5618	      sets[i].dest_addr_elt = elt;
5619	    }
5620	  else
5621	    sets[i].dest_addr_elt = NULL;
5622	}
5623    }
5624
5625  invalidate_from_clobbers (insn);
5626
5627  /* Some registers are invalidated by subroutine calls.  Memory is
5628     invalidated by non-constant calls.  */
5629
5630  if (CALL_P (insn))
5631    {
5632      if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5633	invalidate_memory ();
5634      invalidate_for_call ();
5635    }
5636
5637  /* Now invalidate everything set by this instruction.
5638     If a SUBREG or other funny destination is being set,
5639     sets[i].rtl is still nonzero, so here we invalidate the reg
5640     a part of which is being set.  */
5641
5642  for (i = 0; i < n_sets; i++)
5643    if (sets[i].rtl)
5644      {
5645	/* We can't use the inner dest, because the mode associated with
5646	   a ZERO_EXTRACT is significant.  */
5647	rtx dest = SET_DEST (sets[i].rtl);
5648
5649	/* Needed for registers to remove the register from its
5650	   previous quantity's chain.
5651	   Needed for memory if this is a nonvarying address, unless
5652	   we have just done an invalidate_memory that covers even those.  */
5653	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5654	  invalidate (dest, VOIDmode);
5655	else if (MEM_P (dest))
5656	  invalidate (dest, VOIDmode);
5657	else if (GET_CODE (dest) == STRICT_LOW_PART
5658		 || GET_CODE (dest) == ZERO_EXTRACT)
5659	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5660      }
5661
5662  /* A volatile ASM invalidates everything.  */
5663  if (NONJUMP_INSN_P (insn)
5664      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5665      && MEM_VOLATILE_P (PATTERN (insn)))
5666    flush_hash_table ();
5667
5668  /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
5669     the regs restored by the longjmp come from a later time
5670     than the setjmp.  */
5671  if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5672    {
5673      flush_hash_table ();
5674      goto done;
5675    }
5676
5677  /* Make sure registers mentioned in destinations
5678     are safe for use in an expression to be inserted.
5679     This removes from the hash table
5680     any invalid entry that refers to one of these registers.
5681
5682     We don't care about the return value from mention_regs because
5683     we are going to hash the SET_DEST values unconditionally.  */
5684
5685  for (i = 0; i < n_sets; i++)
5686    {
5687      if (sets[i].rtl)
5688	{
5689	  rtx x = SET_DEST (sets[i].rtl);
5690
5691	  if (!REG_P (x))
5692	    mention_regs (x);
5693	  else
5694	    {
5695	      /* We used to rely on all references to a register becoming
5696		 inaccessible when a register changes to a new quantity,
5697		 since that changes the hash code.  However, that is not
5698		 safe, since after HASH_SIZE new quantities we get a
5699		 hash 'collision' of a register with its own invalid
5700		 entries.  And since SUBREGs have been changed not to
5701		 change their hash code with the hash code of the register,
5702		 it wouldn't work any longer at all.  So we have to check
5703		 for any invalid references lying around now.
5704		 This code is similar to the REG case in mention_regs,
5705		 but it knows that reg_tick has been incremented, and
5706		 it leaves reg_in_table as -1.  */
5707	      unsigned int regno = REGNO (x);
5708	      unsigned int endregno = END_REGNO (x);
5709	      unsigned int i;
5710
5711	      for (i = regno; i < endregno; i++)
5712		{
5713		  if (REG_IN_TABLE (i) >= 0)
5714		    {
5715		      remove_invalid_refs (i);
5716		      REG_IN_TABLE (i) = -1;
5717		    }
5718		}
5719	    }
5720	}
5721    }
5722
5723  /* We may have just removed some of the src_elt's from the hash table.
5724     So replace each one with the current head of the same class.
5725     Also check if destination addresses have been removed.  */
5726
5727  for (i = 0; i < n_sets; i++)
5728    if (sets[i].rtl)
5729      {
5730	if (sets[i].dest_addr_elt
5731	    && sets[i].dest_addr_elt->first_same_value == 0)
5732	  {
5733	    /* The elt was removed, which means this destination is not
5734	       valid after this instruction.  */
5735	    sets[i].rtl = NULL_RTX;
5736	  }
5737	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5738	  /* If elt was removed, find current head of same class,
5739	     or 0 if nothing remains of that class.  */
5740	  {
5741	    struct table_elt *elt = sets[i].src_elt;
5742
5743	    while (elt && elt->prev_same_value)
5744	      elt = elt->prev_same_value;
5745
5746	    while (elt && elt->first_same_value == 0)
5747	      elt = elt->next_same_value;
5748	    sets[i].src_elt = elt ? elt->first_same_value : 0;
5749	  }
5750      }
5751
5752  /* Now insert the destinations into their equivalence classes.  */
5753
5754  for (i = 0; i < n_sets; i++)
5755    if (sets[i].rtl)
5756      {
5757	rtx dest = SET_DEST (sets[i].rtl);
5758	struct table_elt *elt;
5759
5760	/* Don't record value if we are not supposed to risk allocating
5761	   floating-point values in registers that might be wider than
5762	   memory.  */
5763	if ((flag_float_store
5764	     && MEM_P (dest)
5765	     && FLOAT_MODE_P (GET_MODE (dest)))
5766	    /* Don't record BLKmode values, because we don't know the
5767	       size of it, and can't be sure that other BLKmode values
5768	       have the same or smaller size.  */
5769	    || GET_MODE (dest) == BLKmode
5770	    /* If we didn't put a REG_EQUAL value or a source into the hash
5771	       table, there is no point in recording DEST.  */
5772	    || sets[i].src_elt == 0
5773	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5774	       or SIGN_EXTEND, don't record DEST since it can cause
5775	       some tracking to be wrong.
5776
5777	       ??? Think about this more later.  */
5778	    || (paradoxical_subreg_p (dest)
5779		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
5780		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5781	  continue;
5782
5783	/* STRICT_LOW_PART isn't part of the value BEING set,
5784	   and neither is the SUBREG inside it.
5785	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
5786	if (GET_CODE (dest) == STRICT_LOW_PART)
5787	  dest = SUBREG_REG (XEXP (dest, 0));
5788
5789	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5790	  /* Registers must also be inserted into chains for quantities.  */
5791	  if (insert_regs (dest, sets[i].src_elt, 1))
5792	    {
5793	      /* If `insert_regs' changes something, the hash code must be
5794		 recalculated.  */
5795	      rehash_using_reg (dest);
5796	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5797	    }
5798
5799	elt = insert (dest, sets[i].src_elt,
5800		      sets[i].dest_hash, GET_MODE (dest));
5801
5802	/* If this is a constant, insert the constant anchors with the
5803	   equivalent register-offset expressions using register DEST.  */
5804	if (targetm.const_anchor
5805	    && REG_P (dest)
5806	    && SCALAR_INT_MODE_P (GET_MODE (dest))
5807	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5808	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5809
5810	elt->in_memory = (MEM_P (sets[i].inner_dest)
5811			  && !MEM_READONLY_P (sets[i].inner_dest));
5812
5813	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5814	   narrower than M2, and both M1 and M2 are the same number of words,
5815	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5816	   make that equivalence as well.
5817
5818	   However, BAR may have equivalences for which gen_lowpart
5819	   will produce a simpler value than gen_lowpart applied to
5820	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5821	   BAR's equivalences.  If we don't get a simplified form, make
5822	   the SUBREG.  It will not be used in an equivalence, but will
5823	   cause two similar assignments to be detected.
5824
5825	   Note the loop below will find SUBREG_REG (DEST) since we have
5826	   already entered SRC and DEST of the SET in the table.  */
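	/* For example, on a 32-bit target,
	   (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) also implies
	   (set (reg:HI 100) (subreg:HI (reg:SI 101) 0)), since SImode and
	   HImode occupy the same single word here.  (Hypothetical
	   registers; mode sizes assume a 32-bit word.)  */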
5827
5828	if (GET_CODE (dest) == SUBREG
5829	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5830		 / UNITS_PER_WORD)
5831		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5832	    && (GET_MODE_SIZE (GET_MODE (dest))
5833		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5834	    && sets[i].src_elt != 0)
5835	  {
5836	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5837	    struct table_elt *elt, *classp = 0;
5838
5839	    for (elt = sets[i].src_elt->first_same_value; elt;
5840		 elt = elt->next_same_value)
5841	      {
5842		rtx new_src = 0;
5843		unsigned src_hash;
5844		struct table_elt *src_elt;
5845		int byte = 0;
5846
5847		/* Ignore invalid entries.  */
5848		if (!REG_P (elt->exp)
5849		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5850		  continue;
5851
5852		/* We may have already been playing subreg games.  If the
5853		   mode is already correct for the destination, use it.  */
5854		if (GET_MODE (elt->exp) == new_mode)
5855		  new_src = elt->exp;
5856		else
5857		  {
5858		    /* Calculate the big-endian correction for the SUBREG_BYTE.
5859		       We have already checked that M1 (GET_MODE (dest))
5860		       is not narrower than M2 (new_mode).  */
5861		    if (BYTES_BIG_ENDIAN)
5862		      byte = (GET_MODE_SIZE (GET_MODE (dest))
5863			      - GET_MODE_SIZE (new_mode));
5864
5865		    new_src = simplify_gen_subreg (new_mode, elt->exp,
5866					           GET_MODE (dest), byte);
5867		  }
5868
5869		/* The call to simplify_gen_subreg fails if the value
5870		   is VOIDmode, yet we can't do any simplification, e.g.
5871		   for EXPR_LISTs denoting function call results.
5872		   It is invalid to construct a SUBREG with a VOIDmode
5873		   SUBREG_REG, hence a zero new_src means we can't do
5874		   this substitution.  */
5875		if (! new_src)
5876		  continue;
5877
5878		src_hash = HASH (new_src, new_mode);
5879		src_elt = lookup (new_src, src_hash, new_mode);
5880
5881		/* Put the new source in the hash table if it isn't
5882		   there already.  */
5883		if (src_elt == 0)
5884		  {
5885		    if (insert_regs (new_src, classp, 0))
5886		      {
5887			rehash_using_reg (new_src);
5888			src_hash = HASH (new_src, new_mode);
5889		      }
5890		    src_elt = insert (new_src, classp, src_hash, new_mode);
5891		    src_elt->in_memory = elt->in_memory;
5892		  }
5893		else if (classp && classp != src_elt->first_same_value)
5894		  /* Show that two things that we've seen before are
5895		     actually the same.  */
5896		  merge_equiv_classes (src_elt, classp);
5897
5898		classp = src_elt->first_same_value;
5899		/* Ignore invalid entries.  */
5900		while (classp
5901		       && !REG_P (classp->exp)
5902		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5903		  classp = classp->next_same_value;
5904	      }
5905	  }
5906      }
5907
5908  /* Special handling for (set REG0 REG1) where REG0 is the
5909     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
5910     be used in the sequel, so (if easily done) change this insn to
5911     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5912     that computed their value.  Then REG1 will become a dead store
5913     and won't cloud the situation for later optimizations.
5914
5915     Do not make this change if REG1 is a hard register, because it will
5916     then be used in the sequel and we may be changing a two-operand insn
5917     into a three-operand insn.
5918
5919     Also do not do this if we are operating on a copy of INSN.  */
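  /* For example, given
	(set (reg 101) (plus (reg 103) (const_int 4)))
	(set (reg 100) (reg 101))
     with (reg 100) the cheaper register, this becomes
	(set (reg 100) (plus (reg 103) (const_int 4)))
	(set (reg 101) (reg 100))
     and the copy into (reg 101) is a dead store if (reg 101) is unused
     later.  (Hypothetical register numbers, for illustration.)  */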
5920
5921  if (n_sets == 1 && sets[0].rtl)
5922    try_back_substitute_reg (sets[0].rtl, insn);
5923
5924done:;
5925}
5926
5927/* Remove from the hash table all expressions that reference memory.  */
5928
5929static void
5930invalidate_memory (void)
5931{
5932  int i;
5933  struct table_elt *p, *next;
5934
5935  for (i = 0; i < HASH_SIZE; i++)
5936    for (p = table[i]; p; p = next)
5937      {
5938	next = p->next_same_hash;
5939	if (p->in_memory)
5940	  remove_from_table (p, i);
5941      }
5942}
5943
5944/* Perform invalidation on the basis of everything about INSN,
5945   except for invalidating the actual places that are SET in it.
5946   This includes the places CLOBBERed, and anything that might
5947   alias with something that is SET or CLOBBERed.  */
5948
5949static void
5950invalidate_from_clobbers (rtx insn)
5951{
5952  rtx x = PATTERN (insn);
5953
5954  if (GET_CODE (x) == CLOBBER)
5955    {
5956      rtx ref = XEXP (x, 0);
5957      if (ref)
5958	{
5959	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
5960	      || MEM_P (ref))
5961	    invalidate (ref, VOIDmode);
5962	  else if (GET_CODE (ref) == STRICT_LOW_PART
5963		   || GET_CODE (ref) == ZERO_EXTRACT)
5964	    invalidate (XEXP (ref, 0), GET_MODE (ref));
5965	}
5966    }
5967  else if (GET_CODE (x) == PARALLEL)
5968    {
5969      int i;
5970      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5971	{
5972	  rtx y = XVECEXP (x, 0, i);
5973	  if (GET_CODE (y) == CLOBBER)
5974	    {
5975	      rtx ref = XEXP (y, 0);
5976	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
5977		  || MEM_P (ref))
5978		invalidate (ref, VOIDmode);
5979	      else if (GET_CODE (ref) == STRICT_LOW_PART
5980		       || GET_CODE (ref) == ZERO_EXTRACT)
5981		invalidate (XEXP (ref, 0), GET_MODE (ref));
5982	    }
5983	}
5984    }
5985}
5986
5987/* Perform invalidation on the basis of everything about INSN.
5988   This includes the places CLOBBERed, and anything that might
5989   alias with something that is SET or CLOBBERed.  */
5990
5991static void
5992invalidate_from_sets_and_clobbers (rtx insn)
5993{
5994  rtx tem;
5995  rtx x = PATTERN (insn);
5996
5997  if (CALL_P (insn))
5998    {
5999      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6000	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6001	  invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6002    }
6003
6004  /* Ensure we invalidate the destination register of a CALL insn.
6005     This is necessary for machines where this register is a fixed_reg,
6006     because no other code would invalidate it.  */
6007  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
6008    invalidate (SET_DEST (x), VOIDmode);
6009
6010  else if (GET_CODE (x) == PARALLEL)
6011    {
6012      int i;
6013
6014      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6015	{
6016	  rtx y = XVECEXP (x, 0, i);
6017	  if (GET_CODE (y) == CLOBBER)
6018	    {
6019	      rtx clobbered = XEXP (y, 0);
6020
6021	      if (REG_P (clobbered)
6022		  || GET_CODE (clobbered) == SUBREG)
6023		invalidate (clobbered, VOIDmode);
6024	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6025		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6026		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6027	    }
6028	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
6029	    invalidate (SET_DEST (y), VOIDmode);
6030	}
6031    }
6032}
6033
6034/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6035   and replace any registers in them with either an equivalent constant
6036   or the canonical form of the register.  If we are inside an address,
6037   only do this if the address remains valid.
6038
6039   OBJECT is 0 except when within a MEM in which case it is the MEM.
6040
6041   Return the replacement for X.  */
6042
6043static rtx
6044cse_process_notes_1 (rtx x, rtx object, bool *changed)
6045{
6046  enum rtx_code code = GET_CODE (x);
6047  const char *fmt = GET_RTX_FORMAT (code);
6048  int i;
6049
6050  switch (code)
6051    {
6052    case CONST:
6053    case SYMBOL_REF:
6054    case LABEL_REF:
6055    CASE_CONST_ANY:
6056    case PC:
6057    case CC0:
6058    case LO_SUM:
6059      return x;
6060
6061    case MEM:
6062      validate_change (x, &XEXP (x, 0),
6063		       cse_process_notes (XEXP (x, 0), x, changed), 0);
6064      return x;
6065
6066    case EXPR_LIST:
6067    case INSN_LIST:
6068      if (REG_NOTE_KIND (x) == REG_EQUAL)
6069	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6070      if (XEXP (x, 1))
6071	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6072      return x;
6073
6074    case SIGN_EXTEND:
6075    case ZERO_EXTEND:
6076    case SUBREG:
6077      {
6078	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6079	/* We don't substitute VOIDmode constants into these rtx,
6080	   since they would impede folding.  */
6081	if (GET_MODE (new_rtx) != VOIDmode)
6082	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6083	return x;
6084      }
6085
6086    case UNSIGNED_FLOAT:
6087      {
6088	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6089	/* We don't substitute negative VOIDmode constants into these rtx,
6090	   since they would impede folding.  */
6091	if (GET_MODE (new_rtx) != VOIDmode
6092	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
6093	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
6094	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6095	return x;
6096      }
6097
6098    case REG:
6099      i = REG_QTY (REGNO (x));
6100
6101      /* Return a constant or a constant register.  */
6102      if (REGNO_QTY_VALID_P (REGNO (x)))
6103	{
6104	  struct qty_table_elem *ent = &qty_table[i];
6105
6106	  if (ent->const_rtx != NULL_RTX
6107	      && (CONSTANT_P (ent->const_rtx)
6108		  || REG_P (ent->const_rtx)))
6109	    {
6110	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6111	      if (new_rtx)
6112		return copy_rtx (new_rtx);
6113	    }
6114	}
6115
6116      /* Otherwise, canonicalize this register.  */
6117      return canon_reg (x, NULL_RTX);
6118
6119    default:
6120      break;
6121    }
6122
6123  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6124    if (fmt[i] == 'e')
6125      validate_change (object, &XEXP (x, i),
6126		       cse_process_notes (XEXP (x, i), object, changed), 0);
6127
6128  return x;
6129}
6130
6131static rtx
6132cse_process_notes (rtx x, rtx object, bool *changed)
6133{
6134  rtx new_rtx = cse_process_notes_1 (x, object, changed);
6135  if (new_rtx != x)
6136    *changed = true;
6137  return new_rtx;
6138}
6139
6140
6141/* Find a path in the CFG, starting with FIRST_BB, on which to perform CSE.
6142
6143   DATA is a pointer to a struct cse_basic_block_data, that is used to
6144   describe the path.
6145   It is filled with a queue of basic blocks, starting with FIRST_BB
6146   and following a trace through the CFG.
6147
6148   If all paths starting at FIRST_BB have been followed, or no new path
6149   starting at FIRST_BB can be constructed, this function returns FALSE.
6150   Otherwise, DATA->path is filled and the function returns TRUE indicating
6151   that a path to follow was found.
6152
6153   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6154   block in the path will be FIRST_BB.  */
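/* As an illustrative sketch: if FIRST_BB ends in a conditional jump and
   both the branch target and the fallthrough block have FIRST_BB as
   their only predecessor, a first call may build the path
   [FIRST_BB, branch target]; the next call rewinds the last step and
   builds [FIRST_BB, fallthrough block]; after that, no new path starting
   at FIRST_BB remains and FALSE is returned.  (Assumes neither successor
   was visited earlier.)  */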
6155
6156static bool
6157cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6158	       int follow_jumps)
6159{
6160  basic_block bb;
6161  edge e;
6162  int path_size;
6163
6164  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
6165
6166  /* See if there is a previous path.  */
6167  path_size = data->path_size;
6168
6169  /* If there is a previous path, make sure it started with FIRST_BB.  */
6170  if (path_size)
6171    gcc_assert (data->path[0].bb == first_bb);
6172
6173  /* There was only one basic block in the last path.  Clear the path and
6174     return, so that paths starting at another basic block can be tried.  */
6175  if (path_size == 1)
6176    {
6177      path_size = 0;
6178      goto done;
6179    }
6180
6181  /* If the path was empty from the beginning, construct a new path.  */
6182  if (path_size == 0)
6183    data->path[path_size++].bb = first_bb;
6184  else
6185    {
6186      /* Otherwise, path_size must be equal to or greater than 2, because
6187	 a previous path exists that is at least two basic blocks long.
6188
6189	 Update the previous branch path, if any.  If the last branch was
6190	 previously along the branch edge, take the fallthrough edge now.  */
6191      while (path_size >= 2)
6192	{
6193	  basic_block last_bb_in_path, previous_bb_in_path;
6194	  edge e;
6195
6196	  --path_size;
6197	  last_bb_in_path = data->path[path_size].bb;
6198	  previous_bb_in_path = data->path[path_size - 1].bb;
6199
6200	  /* If we previously followed a path along the branch edge, try
6201	     the fallthru edge now.  */
6202	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6203	      && any_condjump_p (BB_END (previous_bb_in_path))
6204	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6205	      && e == BRANCH_EDGE (previous_bb_in_path))
6206	    {
6207	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6208	      if (bb != EXIT_BLOCK_PTR
6209		  && single_pred_p (bb)
6210		  /* We used to assert here that we would only see blocks
6211		     that we have not visited yet.  But we may end up
6212		     visiting basic blocks twice if the CFG has changed
6213		     in this run of cse_main, because when the CFG changes
6214		     the topological sort of the CFG also changes.  A basic
6215		     block that previously had more than one predecessor
6216		     may now have a single predecessor, and become part of
6217		     a path that starts at another basic block.
6218
6219		     We still want to visit each basic block only once, so
6220		     halt the path here if we have already visited BB.  */
6221		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
6222		{
6223		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
6224		  data->path[path_size++].bb = bb;
6225		  break;
6226		}
6227	    }
6228
6229	  data->path[path_size].bb = NULL;
6230	}
6231
6232      /* If only one block remains in the path, bail.  */
6233      if (path_size == 1)
6234	{
6235	  path_size = 0;
6236	  goto done;
6237	}
6238    }
6239
6240  /* Extend the path if possible.  */
6241  if (follow_jumps)
6242    {
6243      bb = data->path[path_size - 1].bb;
6244      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6245	{
6246	  if (single_succ_p (bb))
6247	    e = single_succ_edge (bb);
6248	  else if (EDGE_COUNT (bb->succs) == 2
6249		   && any_condjump_p (BB_END (bb)))
6250	    {
6251	      /* First try to follow the branch.  If that doesn't lead
6252		 to a useful path, follow the fallthru edge.  */
6253	      e = BRANCH_EDGE (bb);
6254	      if (!single_pred_p (e->dest))
6255		e = FALLTHRU_EDGE (bb);
6256	    }
6257	  else
6258	    e = NULL;
6259
6260	  if (e
6261	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6262	      && e->dest != EXIT_BLOCK_PTR
6263	      && single_pred_p (e->dest)
6264	      /* Avoid visiting basic blocks twice.  The large comment
6265		 above explains why this can happen.  */
6266	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
6267	    {
6268	      basic_block bb2 = e->dest;
6269	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
6270	      data->path[path_size++].bb = bb2;
6271	      bb = bb2;
6272	    }
6273	  else
6274	    bb = NULL;
6275	}
6276    }
6277
6278done:
6279  data->path_size = path_size;
6280  return path_size != 0;
6281}
6282
6283/* Dump the path in DATA to file F.  NSETS is the number of sets
6284   in the path.  */
6285
6286static void
6287cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6288{
6289  int path_entry;
6290
6291  fprintf (f, ";; Following path with %d sets: ", nsets);
6292  for (path_entry = 0; path_entry < data->path_size; path_entry++)
6293    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6294  fputc ('\n', f);
6295  fflush (f);
6296}
6297
6298
6299/* Return true if BB has exception handling successor edges.  */
6300
6301static bool
6302have_eh_succ_edges (basic_block bb)
6303{
6304  edge e;
6305  edge_iterator ei;
6306
6307  FOR_EACH_EDGE (e, ei, bb->succs)
6308    if (e->flags & EDGE_EH)
6309      return true;
6310
6311  return false;
6312}
6313
6314
6315/* Scan to the end of the path described by DATA.  Record in DATA->nsets
6316   an estimate of the total number of SETs of all insns in the path.  */
6317
6318static void
6319cse_prescan_path (struct cse_basic_block_data *data)
6320{
6321  int nsets = 0;
6322  int path_size = data->path_size;
6323  int path_entry;
6324
6325  /* Scan to end of each basic block in the path.  */
6326  for (path_entry = 0; path_entry < path_size; path_entry++)
6327    {
6328      basic_block bb;
6329      rtx insn;
6330
6331      bb = data->path[path_entry].bb;
6332
6333      FOR_BB_INSNS (bb, insn)
6334	{
6335	  if (!INSN_P (insn))
6336	    continue;
6337
6338	  /* A PARALLEL can have lots of SETs in it,
6339	     especially if it is really an ASM_OPERANDS.  */
6340	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
6341	    nsets += XVECLEN (PATTERN (insn), 0);
6342	  else
6343	    nsets += 1;
6344	}
6345    }
6346
6347  data->nsets = nsets;
6348}
6349
6350/* Process a single extended basic block described by EBB_DATA.  */
6351
6352static void
6353cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6354{
6355  int path_size = ebb_data->path_size;
6356  int path_entry;
6357  int num_insns = 0;
6358
6359  /* Allocate the space needed by qty_table.  */
6360  qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6361
6362  new_basic_block ();
6363  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6364  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6365  for (path_entry = 0; path_entry < path_size; path_entry++)
6366    {
6367      basic_block bb;
6368      rtx insn;
6369
6370      bb = ebb_data->path[path_entry].bb;
6371
6372      /* Invalidate recorded information for eh regs if there is an EH
6373	 edge pointing to that bb.  */
6374      if (bb_has_eh_pred (bb))
6375	{
6376	  df_ref *def_rec;
6377
6378	  for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6379	    {
6380	      df_ref def = *def_rec;
6381	      if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6382		invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6383	    }
6384	}
6385
6386      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6387      FOR_BB_INSNS (bb, insn)
6388	{
6389	  /* If we have processed PARAM_MAX_CSE_INSNS insns (1,000 by default),
6390	     flush the hash table to avoid extreme quadratic behavior.  We must
6391	     not include NOTEs in the count since there may be more of them
6392	     when generating debugging information.  If we clear the table at
6393	     different times, code generated with -g -O might differ from code
6394	     generated with -O but not -g.
6395
6396	     FIXME: This is a real kludge and needs to be done some other
6397		    way.  */
6398	  if (NONDEBUG_INSN_P (insn)
6399	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6400	    {
6401	      flush_hash_table ();
6402	      num_insns = 0;
6403	    }
6404
6405	  if (INSN_P (insn))
6406	    {
6407	      /* Process notes first so we have all notes in canonical forms
6408		 when looking for duplicate operations.  */
6409	      if (REG_NOTES (insn))
6410		{
6411		  bool changed = false;
6412		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6413						        NULL_RTX, &changed);
6414		  if (changed)
6415		    df_notes_rescan (insn);
6416		}
6417
6418	      cse_insn (insn);
6419
6420	      /* If we haven't already found an insn where we added a LABEL_REF,
6421		 check this one.  */
6422	      if (INSN_P (insn) && !recorded_label_ref
6423		  && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6424				   (void *) insn))
6425		recorded_label_ref = true;
6426
6427#ifdef HAVE_cc0
6428	      if (NONDEBUG_INSN_P (insn))
6429		{
6430		  /* If the previous insn sets CC0 and this insn no
6431		     longer references CC0, delete the previous insn.
6432		     Here we use fact that nothing expects CC0 to be
6433		     valid over an insn, which is true until the final
6434		     pass.  */
6435		  rtx prev_insn, tem;
6436
6437		  prev_insn = prev_nonnote_nondebug_insn (insn);
6438		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6439		      && (tem = single_set (prev_insn)) != NULL_RTX
6440		      && SET_DEST (tem) == cc0_rtx
6441		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6442		    delete_insn (prev_insn);
6443
6444		  /* If this insn is not the last insn in the basic
6445		     block, it will be PREV_INSN(insn) in the next
6446		     iteration.  If we recorded any CC0-related
6447		     information for this insn, remember it.  */
6448		  if (insn != BB_END (bb))
6449		    {
6450		      prev_insn_cc0 = this_insn_cc0;
6451		      prev_insn_cc0_mode = this_insn_cc0_mode;
6452		    }
6453		}
6454#endif
6455	    }
6456	}
6457
6458      /* With non-call exceptions, we are not always able to update
6459	 the CFG properly inside cse_insn.  So clean up possibly
6460	 redundant EH edges here.  */
6461      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6462	cse_cfg_altered |= purge_dead_edges (bb);
6463
6464      /* If we changed a conditional jump, we may have terminated
6465	 the path we are following.  Check that by verifying that
6466	 the edge we would take still exists.  If the edge does
6467	 not exist anymore, purge the remainder of the path.
6468	 Note that this will cause us to return to the caller.  */
6469      if (path_entry < path_size - 1)
6470	{
6471	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6472	  if (!find_edge (bb, next_bb))
6473	    {
6474	      do
6475		{
6476		  path_size--;
6477
6478		  /* If we truncate the path, we must also reset the
6479		     visited bit on the remaining blocks in the path,
6480		     or we will never visit them at all.  */
6481		  bitmap_clear_bit (cse_visited_basic_blocks,
6482			     ebb_data->path[path_size].bb->index);
6483		  ebb_data->path[path_size].bb = NULL;
6484		}
6485	      while (path_size - 1 != path_entry);
6486	      ebb_data->path_size = path_size;
6487	    }
6488	}
6489
6490      /* If this is a conditional jump insn, record any known
6491	 equivalences due to the condition being tested.  */
6492      insn = BB_END (bb);
6493      if (path_entry < path_size - 1
6494	  && JUMP_P (insn)
6495	  && single_set (insn)
6496	  && any_condjump_p (insn))
6497	{
6498	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6499	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6500	  record_jump_equiv (insn, taken);
6501	}
6502
6503#ifdef HAVE_cc0
6504      /* Clear the recorded CC0 state; it can't provide useful
6505	 information across basic block boundaries.  */
6506      prev_insn_cc0 = 0;
6507#endif
6508    }
6509
6510  gcc_assert (next_qty <= max_qty);
6511
6512  free (qty_table);
6513}
6514
6515
6516/* Perform cse on the instructions of a function.
6517   F is the first instruction.
6518   NREGS is one plus the highest pseudo-reg number used in the function.
6519
6520   Return 2 if jump optimizations should be redone due to simplifications
6521   in conditional jump instructions.
6522   Return 1 if the CFG should be cleaned up because it has been modified.
6523   Return 0 otherwise.  */

static int
cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block);
  int i, n_blocks;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
                           PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_recog ();
  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
  bitmap_clear (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse completion order (reverse
     post-order, RPO), excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
         processed.  */
      do
        {
          bb = BASIC_BLOCK (rc_order[i++]);
        }
      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
             && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
        {
          /* Pre-scan the path.  */
          cse_prescan_path (&ebb_data);

          /* If this path has no sets, skip it.  */
          if (ebb_data.nsets == 0)
            continue;

          /* Get a reasonable estimate for the maximum number of qty's
             needed for this path.  For this, we take the number of sets
             and multiply that by MAX_RECOG_OPERANDS.  */
          max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

          /* Dump the path we're about to process.  */
          if (dump_file)
            cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

          cse_extended_basic_block (&ebb_data);
        }
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}

/* Called via for_each_rtx to see if an insn is using a LABEL_REF for
   which there isn't a REG_LABEL_OPERAND note.
   Return one if so.  DATA is the insn.  */

static int
check_for_label_ref (rtx *rtl, void *data)
{
  rtx insn = (rtx) data;

  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  return (GET_CODE (*rtl) == LABEL_REF
          && ! LABEL_REF_NONLOCAL_P (*rtl)
          && (!JUMP_P (insn)
              || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
          && LABEL_P (XEXP (*rtl, 0))
          && INSN_UID (XEXP (*rtl, 0)) != 0
          && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
}

/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
   We must then count uses of a SET_DEST regardless, because the insn can't be
   deleted here.  */
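/* As an illustration (register numbers invented), scanning

       (set (reg:SI 100) (plus:SI (reg:SI 100) (reg:SI 101)))

   with INCR == 1 normally increments COUNTS[101] only: the use of
   register 100 in the SET_SRC is not counted because it is also the
   SET_DEST, so deleting the insn would not change the liveness of
   register 100.  If the insn may trap or has side effects, the caller
   passes pc_rtx for DEST instead; then no use matches DEST, and both
   COUNTS[100] and COUNTS[101] are incremented.  */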

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
        counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
        count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
                       dest ? dest : SET_DEST (x),
                       incr);
      return;

    case DEBUG_INSN:
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect DEST to be NULL_RTX here.  If the insn may throw,
         or if it cannot be deleted due to side-effects, mark this fact
         by setting DEST to pc_rtx.  */
      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
          || side_effects_p (PATTERN (x)))
        dest = pc_rtx;
      if (code == CALL_INSN)
        count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since the loop
         optimizer may try to use them.  */
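      /* For instance (register numbers invented), an insn whose value
         was computed by a division libcall might carry the note

             (expr_list:REG_EQUAL (div:SI (reg:SI 103) (reg:SI 104)) (nil))

         and registers 103 and 104 must then be counted as used, even
         though the call itself only reads its argument registers.  */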

      note = find_reg_equal_equiv_note (x);
      if (note)
        {
          rtx eqv = XEXP (note, 0);

          /* This REG_EQUAL note describes the result of a function call.
             Process all the arguments.  */
          if (GET_CODE (eqv) == EXPR_LIST)
            do
              {
                count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
                eqv = XEXP (eqv, 1);
              }
            while (eqv && GET_CODE (eqv) == EXPR_LIST);
          else
            count_reg_usage (eqv, counts, dest, incr);
        }
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
          || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
          /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
             involving registers in the address.  */
          || GET_CODE (XEXP (x, 0)) == CLOBBER)
        count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
        count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    case INSN_LIST:
      gcc_unreachable ();

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}

/* Return true if X is a dead register.  */

static inline int
is_dead_reg (rtx x, int *counts)
{
  return (REG_P (x)
          && REGNO (x) >= FIRST_PSEUDO_REGISTER
          && counts[REGNO (x)] == 0);
}

/* Return true if SET is live.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
            int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
           && !side_effects_p (SET_SRC (set))
           && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
               || !INSN_P (tem)
               || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (!is_dead_reg (SET_DEST (set), counts)
           || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}

/* Return true if INSN is live.  */
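/* A DEBUG_INSN, in particular, is dead when a later DEBUG_INSN rebinds
   the same decl before any real insn intervenes.  An invented sketch:

       (debug_insn (var_location x (reg:SI 100)))   ;; dead
       (note ...)
       (debug_insn (var_location x (reg:SI 101)))   ;; live

   The first binding can never be observed by the debugger, so it is
   not considered live.  */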

static bool
insn_live_p (rtx insn, int *counts)
{
  int i;
  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          if (GET_CODE (elt) == SET)
            {
              if (set_live_p (elt, insn, counts))
                return true;
            }
          else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
            return true;
        }
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx next;

      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
        if (NOTE_P (next))
          continue;
        else if (!DEBUG_INSN_P (next))
          return true;
        else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
          return false;

      return true;
    }
  else
    return true;
}

/* Count the number of stores into each pseudo register.
   Callback for note_stores.  */

static void
count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  int *counts = (int *) data;
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (x)]++;
}

struct dead_debug_insn_data
{
  int *counts;
  rtx *replacements;
  bool seen_repl;
};

/* Return nonzero if a DEBUG_INSN needs to be reset because some dead
   pseudo doesn't have a replacement.  Callback for for_each_rtx.  */

static int
is_dead_debug_insn (rtx *loc, void *data)
{
  rtx x = *loc;
  struct dead_debug_insn_data *ddid = (struct dead_debug_insn_data *) data;

  if (is_dead_reg (x, ddid->counts))
    {
      if (ddid->replacements && ddid->replacements[REGNO (x)] != NULL_RTX)
        ddid->seen_repl = true;
      else
        return 1;
    }
  return 0;
}

/* Replace a dead pseudo in a DEBUG_INSN with its replacement DEBUG_EXPR.
   Callback for simplify_replace_fn_rtx.  */
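/* When the modes disagree, the value is wrapped in a lowpart SUBREG.
   E.g. if REPLACEMENTS[100] is (debug_expr:SI D#1), a use of
   (reg:QI 100) would become (subreg:QI (debug_expr:SI D#1) 0) on a
   little-endian target.  (Register number and offset invented for
   illustration.)  */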

static rtx
replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
{
  rtx *replacements = (rtx *) data;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && replacements[REGNO (x)] != NULL_RTX)
    {
      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
        return replacements[REGNO (x)];
      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
                             GET_MODE (replacements[REGNO (x)]));
    }
  return NULL_RTX;
}

/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in the loop optimizer, since it
   won't try to move dead invariants out of loops or make givs for dead
   quantities.  The remaining passes of the compilation are also sped up.  */
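/* Two sketches of insns this function deletes (register numbers
   invented):

       (set (reg:SI 100) (reg:SI 100))          ;; no-op self copy
       (set (reg:SI 101) (const_int 7))         ;; 101 is never used

   Neither would be deleted if it could throw (when such exceptions
   must be preserved) or if its SET_SRC had side effects.  */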

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
        if (DEBUG_INSN_P (insn))
          count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
                           NULL_RTX, 1);
        else if (INSN_P (insn))
          {
            count_reg_usage (insn, counts, NULL_RTX, 1);
            note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
          }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
         First one counts how many times each pseudo is used outside
         of debug insns, second counts how many times each pseudo is
         used in debug insns and third counts how many times a pseudo
         is stored.  */
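      /* A sketch of that layout, for a pseudo R with 0 <= R < NREG:

             counts[R]             uses of R outside debug insns
             counts[NREG + R]      uses of R inside debug insns
             counts[NREG * 2 + R]  stores into R  */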
    }
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
         which counts how many times each pseudo is used.  */
    }
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
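  /* Roughly (all names and register numbers invented), a dead setter

         (insn (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 1))))
         ...
         (debug_insn (var_location i (reg:SI 100)))

     becomes

         (debug_insn (var_location D#1 (plus:SI (reg:SI 101) (const_int 1))))
         ...
         (debug_insn (var_location i (debug_expr:SI D#1)))

     so the variable's value remains available to the debugger.  */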
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
        continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
         being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
        {
          if (DEBUG_INSN_P (insn))
            count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
                             NULL_RTX, -1);
          else
            {
              rtx set;
              if (MAY_HAVE_DEBUG_INSNS
                  && (set = single_set (insn)) != NULL_RTX
                  && is_dead_reg (SET_DEST (set), counts)
                  /* Used at least once in some DEBUG_INSN.  */
                  && counts[REGNO (SET_DEST (set)) + nreg] > 0
                  /* And set exactly once.  */
                  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
                  && !side_effects_p (SET_SRC (set))
                  && asm_noperands (PATTERN (insn)) < 0)
                {
                  rtx dval, bind;

                  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
                  dval = make_debug_expr_from_rtl (SET_DEST (set));

                  /* Emit a debug bind insn before the dead setter.  */
                  bind = gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
                                               DEBUG_EXPR_TREE_DECL (dval),
                                               SET_SRC (set),
                                               VAR_INIT_STATUS_INITIALIZED);
                  count_reg_usage (bind, counts + nreg, NULL_RTX, 1);

                  bind = emit_debug_insn_before (bind, insn);
                  df_insn_rescan (bind);

                  if (replacements == NULL)
                    replacements = XCNEWVEC (rtx, nreg);
                  replacements[REGNO (SET_DEST (set))] = dval;
                }

              count_reg_usage (insn, counts, NULL_RTX, -1);
              ndead++;
            }
          delete_insn_and_edges (insn);
        }
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      struct dead_debug_insn_data ddid;
      ddid.counts = counts;
      ddid.replacements = replacements;
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        if (DEBUG_INSN_P (insn))
          {
            /* If this debug insn references a dead register that wasn't
               replaced with a DEBUG_EXPR, reset the DEBUG_INSN.  */
            ddid.seen_repl = false;
            if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn),
                              is_dead_debug_insn, &ddid))
              {
                INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
                df_insn_rescan (insn);
              }
            else if (ddid.seen_repl)
              {
                INSN_VAR_LOCATION_LOC (insn)
                  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
                                             NULL_RTX, replace_dead_reg,
                                             replacements);
                df_insn_rescan (insn);
              }
          }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
             ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}

/* This function is called via for_each_rtx.  The argument, NEWREG, is
   a condition code register with the desired mode.  If we are looking
   at the same register in a different mode, replace it with
   NEWREG.  */

static int
cse_change_cc_mode (rtx *loc, void *data)
{
  struct change_cc_mode_args *args = (struct change_cc_mode_args *) data;

  if (*loc
      && REG_P (*loc)
      && REGNO (*loc) == REGNO (args->newreg)
      && GET_MODE (*loc) != GET_MODE (args->newreg))
    {
      validate_change (args->insn, loc, args->newreg, 1);

      return -1;
    }
  return 0;
}

/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx insn, rtx newreg)
{
  struct change_cc_mode_args args;
  int success;

  if (!INSN_P (insn))
    return;

  args.insn = insn;
  args.newreg = newreg;

  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);

  /* If the following assertion is triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes can only be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}

/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        continue;

      if (reg_set_p (newreg, insn))
        return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}

/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */
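/* A sketch of the shape this function looks for (RTL invented):

       BB:     (set (reg:CCZ flags) (compare:CCZ (reg:SI a) (reg:SI b)))
               (conditional jump using flags)
       succ1:  (set (reg:CC flags) (compare:CC (reg:SI a) (reg:SI b)))
       succ2:  (set (reg:CCZ flags) (compare:CCZ (reg:SI a) (reg:SI b)))

   The set in succ2 matches exactly and is simply deleted.  The set in
   succ1 matches except for the mode; if the target's cc_modes_compatible
   hook can reconcile CCZmode and CCmode (and CAN_CHANGE_MODE permits),
   the comparison in BB is rewritten to the common mode and the set in
   succ1 is deleted as well.  */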

static enum machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
              bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
        continue;

      if (EDGE_COUNT (e->dest->preds) != 1
          || e->dest == EXIT_BLOCK_PTR
          /* Avoid endless recursion on unreachable blocks.  */
          || e->dest == orig_bb)
        continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
        {
          rtx set;

          if (! INSN_P (insn))
            continue;

          /* If CC_SRC is modified, we have to stop looking for
             something which uses it.  */
          if (modified_in_p (cc_src, insn))
            break;

          /* Check whether INSN sets CC_REG to CC_SRC.  */
          set = single_set (insn);
          if (set
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
            {
              bool found;
              enum machine_mode set_mode;
              enum machine_mode comp_mode;

              found = false;
              set_mode = GET_MODE (SET_SRC (set));
              comp_mode = set_mode;
              if (rtx_equal_p (cc_src, SET_SRC (set)))
                found = true;
              else if (GET_CODE (cc_src) == COMPARE
                       && GET_CODE (SET_SRC (set)) == COMPARE
                       && mode != set_mode
                       && rtx_equal_p (XEXP (cc_src, 0),
                                       XEXP (SET_SRC (set), 0))
                       && rtx_equal_p (XEXP (cc_src, 1),
                                       XEXP (SET_SRC (set), 1)))
                {
                  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
                  if (comp_mode != VOIDmode
                      && (can_change_mode || comp_mode == mode))
                    found = true;
                }

              if (found)
                {
                  found_equiv = true;
                  if (insn_count < ARRAY_SIZE (insns))
                    {
                      insns[insn_count] = insn;
                      modes[insn_count] = set_mode;
                      last_insns[insn_count] = end;
                      ++insn_count;

                      if (mode != comp_mode)
                        {
                          gcc_assert (can_change_mode);
                          mode = comp_mode;

                          /* The modified insn will be re-recognized later.  */
                          PUT_MODE (cc_src, mode);
                        }
                    }
                  else
                    {
                      if (set_mode != mode)
                        {
                          /* We found a matching expression in the
                             wrong mode, but we don't have room to
                             store it in the array.  Punt.  This case
                             should be rare.  */
                          break;
                        }
                      /* INSN sets CC_REG to a value equal to CC_SRC
                         with the right mode.  We can simply delete
                         it.  */
                      delete_insn (insn);
                    }

                  /* We found an instruction to delete.  Keep looking,
                     in the hopes of finding a three-way jump.  */
                  continue;
                }

              /* We found an instruction which sets the condition
                 code, so don't look any farther.  */
              break;
            }

          /* If INSN sets CC_REG in some other way, don't look any
             farther.  */
          if (reg_set_p (cc_reg, insn))
            break;
        }

      /* If we fell off the bottom of the block, we can keep looking
         through successors.  We pass CAN_CHANGE_MODE as false because
         we aren't prepared to handle compatibility between the
         further blocks and this block.  */
      if (insn == end)
        {
          enum machine_mode submode;

          submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
          if (submode != VOIDmode)
            {
              gcc_assert (submode == mode);
              found_equiv = true;
              can_change_mode = false;
            }
        }
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
        {
          /* We need to change the mode of CC_REG in INSNS[i] and
             subsequent instructions.  */
          if (! newreg)
            {
              if (GET_MODE (cc_reg) == mode)
                newreg = cc_reg;
              else
                newreg = gen_rtx_REG (mode, REGNO (cc_reg));
            }
          cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
                                    newreg);
        }

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}

/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
         condition code register.  Then look for the instruction which
         sets the condition code register.  Then look through the
         successor blocks for instructions which set the condition
         code register to the same value.  There are other possible
         uses of the condition code register, but these are by far the
         most common and the ones which we are most likely to be able
         to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
        continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
        cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
        cc_reg = cc_reg_2;
      else
        continue;

      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
           insn && insn != PREV_INSN (BB_HEAD (bb));
           insn = PREV_INSN (insn))
        {
          rtx set;

          if (! INSN_P (insn))
            continue;
          set = single_set (insn);
          if (set
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
            {
              cc_src_insn = insn;
              cc_src = SET_SRC (set);
              break;
            }
          else if (reg_set_p (cc_reg, insn))
            break;
        }

      if (! cc_src_insn)
        continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
        continue;

      /* Now CC_REG is a condition code register used for a
         conditional jump at the end of the block, and CC_SRC, in
         CC_SRC_INSN, is the value to which that condition code
         register is set, and CC_SRC is still meaningful at the end of
         the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
        {
          gcc_assert (mode == GET_MODE (cc_src));
          if (mode != orig_mode)
            {
              rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

              cse_change_cc_mode_insn (cc_src_insn, newreg);

              /* Do the same in the following insns that use the
                 current value of CC_REG within BB.  */
              cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
                                        NEXT_INSN (last_insn),
                                        newreg);
            }
        }
    }
}


/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */
static bool
gate_handle_cse (void)
{
  return optimize > 0;
}

static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cleanup_cfg (0);

  return 0;
}

struct rtl_opt_pass pass_cse =
{
 {
  RTL_PASS,
  "cse1",                               /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_handle_cse,                      /* gate */
  rest_of_handle_cse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect |
  TODO_verify_flow,                     /* todo_flags_finish */
 }
};


static bool
gate_handle_cse2 (void)
{
  return optimize > 0 && flag_rerun_cse_after_loop;
}

/* Run second CSE pass after loop optimizations.  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}


struct rtl_opt_pass pass_cse2 =
{
 {
  RTL_PASS,
  "cse2",                               /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_handle_cse2,                     /* gate */
  rest_of_handle_cse2,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};

static bool
gate_handle_cse_after_global_opts (void)
{
  return optimize > 0 && flag_rerun_cse_after_global_opts;
}

/* Run a local CSE pass after the global optimizations.  */
static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  flag_cse_follow_jumps = save_cfj;
  return 0;
}

struct rtl_opt_pass pass_cse_after_global_opts =
{
 {
  RTL_PASS,
  "cse_local",                          /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_handle_cse_after_global_opts,    /* gate */
  rest_of_handle_cse_after_global_opts, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};