/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.
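
   For example (an illustrative sketch): after scanning

	(set (reg:si 101) (reg:si 100))

   reg 101 is given reg 100's quantity number, so a later occurrence
   of (reg:si 101) is known to have the same value as (reg:si 100).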

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions
   with that quantity's `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the
   configuration macro CONST_COSTS and will often depend on the
   constant value.  In any event, expressions containing constants
   can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
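
   For example (illustrative), (const_int 1) moved into an SImode
   register is recorded with mode SImode, while the same (shared)
   rtx moved into a DImode register is recorded separately with
   mode DImode.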

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.
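
   For example (illustrative), if reg 101 was copied from reg 100 and
   the two share a quantity number, then (plus:si (reg:si 101)
   (const_int 4)) hashes identically to (plus:si (reg:si 100)
   (const_int 4)), so the two expressions find the same table entry.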

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match causes the entries to
   be ignored if anyone tries to match them.
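
   To illustrate (a sketch of the mechanism): suppose an expression
   mentioning reg 100 is entered while REG_TICK (100) == 5; then
   REG_IN_TABLE (100) becomes 5.  A later store into reg 100 bumps
   REG_TICK (100) to 6.  Nothing is scanned at that point, but the
   mismatch keeps the stale entries from matching, and they are
   actually swept out only if we later want to enter a new reference
   to reg 100.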

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.
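
   For example (illustrative), (clobber (reg:si 3)) invalidates every
   table entry mentioning reg 3, while (clobber (mem:BLK (scratch)))
   invalidates everything that resides in memory.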

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
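
/* To illustrate (a hypothetical example): when
   (const (plus (symbol_ref "x") (const_int 8))) is entered, the
   constant-free form (symbol_ref "x") is entered as well and the two
   entries are linked.  If some register is later known to hold the
   latter, an expression related to the former can be computed from
   that register plus 8.  */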

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
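
   For example (illustrative): after a branch on
   (gt (reg:si 100) (const_int 0)) is known taken, the quantity for
   reg 100 may record comparison_code == GT, comparison_const ==
   (const_int 0) and comparison_qty == -1, so a repeat of the same
   test can be folded to "true".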

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
#endif

/* Insn being scanned.  */

static rtx this_insn;
static bool optimize_this_for_speed_p;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; if so, we must rerun jump after CSE
   to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all elements point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
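
/* For example (illustrative): a pseudo (reg:si 100) whose quantity
   number is 7 hashes to (((unsigned) REG << 7) + 7) & HASH_MASK,
   computed from the quantity number alone; every other rtx goes
   through canon_hash.  */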

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P(N)							\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    enum machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static bool cse_rtx_varies_p (const_rtx, bool);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, bool);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = (int *) data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
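
/* For example (illustrative): preferable (MAX_COST, 0, 4, MAX_COST)
   is positive, because an expression of cost MAX_COST is entirely
   unwanted regardless of its register cost; for equal costs,
   preferable (4, 1, 4, 3) is negative, so the first rtx wins on
   register pressure.  */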

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer, optimize_this_for_speed_p) * 2);
}


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      if (cse_reg_info_table)
	free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is a hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}


/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
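
/* For example (an illustrative sketch, assuming targetm.const_anchor
   is 0x8000): for CST == (const_int 0x12345678) this computes
     *lower_base == 0x12340000, *lower_offs == 0x5678,
     *upper_base == 0x12348000, *upper_offs == -0x2988.  */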

/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     enum machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression; therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
}

/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}
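
/* Continuing the example above (illustrative, with
   targetm.const_anchor == 0x8000): if reg 100 holds 0x12345678,
   this records that 0x12340000 is equivalent to
   (plus (reg 100) (const_int -0x5678)) and that 0x12348000 is
   equivalent to (plus (reg 100) (const_int 0x2988)).  */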

/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}

/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful, or NULL_RTX
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, enum machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and REG_COST is the
   cost of registers in X.  X is inserted at the proper position to
   keep the class in the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */
1581
1582static struct table_elt *
1583insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1584		   enum machine_mode mode, int cost, int reg_cost)
1585{
1586  struct table_elt *elt;
1587
1588  /* If X is a register and we haven't made a quantity for it,
1589     something is wrong.  */
1590  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1591
1592  /* If X is a hard register, show it is being put in the table.  */
1593  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1594    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1595
1596  /* Put an element for X into the right hash bucket.  */
1597
1598  elt = free_element_chain;
1599  if (elt)
1600    free_element_chain = elt->next_same_hash;
1601  else
1602    elt = XNEW (struct table_elt);
1603
1604  elt->exp = x;
1605  elt->canon_exp = NULL_RTX;
1606  elt->cost = cost;
1607  elt->regcost = reg_cost;
1608  elt->next_same_value = 0;
1609  elt->prev_same_value = 0;
1610  elt->next_same_hash = table[hash];
1611  elt->prev_same_hash = 0;
1612  elt->related_value = 0;
1613  elt->in_memory = 0;
1614  elt->mode = mode;
1615  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1616
1617  if (table[hash])
1618    table[hash]->prev_same_hash = elt;
1619  table[hash] = elt;
1620
1621  /* Put it into the proper value-class.  */
1622  if (classp)
1623    {
1624      classp = classp->first_same_value;
1625      if (CHEAPER (elt, classp))
1626	/* Insert at the head of the class.  */
1627	{
1628	  struct table_elt *p;
1629	  elt->next_same_value = classp;
1630	  classp->prev_same_value = elt;
1631	  elt->first_same_value = elt;
1632
1633	  for (p = classp; p; p = p->next_same_value)
1634	    p->first_same_value = elt;
1635	}
1636      else
1637	{
	  /* Insert not at the head of the class: put it after the last
	     element cheaper than X.  */
1640	  struct table_elt *p, *next;
1641
1642	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1643	       p = next);
1644
1645	  /* Put it after P and before NEXT.  */
1646	  elt->next_same_value = next;
1647	  if (next)
1648	    next->prev_same_value = elt;
1649
1650	  elt->prev_same_value = p;
1651	  p->next_same_value = elt;
1652	  elt->first_same_value = classp;
1653	}
1654    }
1655  else
1656    elt->first_same_value = elt;
1657
1658  /* If this is a constant being set equivalent to a register or a register
1659     being set equivalent to a constant, note the constant equivalence.
1660
1661     If this is a constant, it cannot be equivalent to a different constant,
1662     and a constant is the only thing that can be cheaper than a register.  So
1663     we know the register is the head of the class (before the constant was
1664     inserted).
1665
1666     If this is a register that is not already known equivalent to a
1667     constant, we must check the entire class.
1668
     If this is a register that is already known equivalent to a
     constant, update the qty's `const_insn' to show that `this_insn' is
     the latest insn making that quantity equivalent to the constant.  */
1672
1673  if (elt->is_const && classp && REG_P (classp->exp)
1674      && !REG_P (x))
1675    {
1676      int exp_q = REG_QTY (REGNO (classp->exp));
1677      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1678
1679      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1680      exp_ent->const_insn = this_insn;
1681    }
1682
1683  else if (REG_P (x)
1684	   && classp
1685	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1686	   && ! elt->is_const)
1687    {
1688      struct table_elt *p;
1689
1690      for (p = classp; p != 0; p = p->next_same_value)
1691	{
1692	  if (p->is_const && !REG_P (p->exp))
1693	    {
1694	      int x_q = REG_QTY (REGNO (x));
1695	      struct qty_table_elem *x_ent = &qty_table[x_q];
1696
1697	      x_ent->const_rtx
1698		= gen_lowpart (GET_MODE (x), p->exp);
1699	      x_ent->const_insn = this_insn;
1700	      break;
1701	    }
1702	}
1703    }
1704
1705  else if (REG_P (x)
1706	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1707	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1708    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1709
1710  /* If this is a constant with symbolic value,
1711     and it has a term with an explicit integer value,
1712     link it up with related expressions.  */
1713  if (GET_CODE (x) == CONST)
1714    {
1715      rtx subexp = get_related_value (x);
1716      unsigned subhash;
1717      struct table_elt *subelt, *subelt_prev;
1718
1719      if (subexp != 0)
1720	{
1721	  /* Get the integer-free subexpression in the hash table.  */
1722	  subhash = SAFE_HASH (subexp, mode);
1723	  subelt = lookup (subexp, subhash, mode);
1724	  if (subelt == 0)
1725	    subelt = insert (subexp, NULL, subhash, mode);
1726	  /* Initialize SUBELT's circular chain if it has none.  */
1727	  if (subelt->related_value == 0)
1728	    subelt->related_value = subelt;
1729	  /* Find the element in the circular chain that precedes SUBELT.  */
1730	  subelt_prev = subelt;
1731	  while (subelt_prev->related_value != subelt)
1732	    subelt_prev = subelt_prev->related_value;
1733	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1734	     This way the element that follows SUBELT is the oldest one.  */
1735	  elt->related_value = subelt_prev->related_value;
1736	  subelt_prev->related_value = elt;
1737	}
1738    }
1739
1740  return elt;
1741}
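
/* To illustrate the related-value machinery above: after inserting
   (const (plus (symbol_ref "s") (const_int 4))) and then
   (const (plus (symbol_ref "s") (const_int 8))), the entry for
   (symbol_ref "s") heads a circular chain

     "s" -> "s"+4 -> "s"+8 -> back to "s"

   so the element following the head is always the oldest related
   value.  */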
1742
1743/* Wrap insert_with_costs by passing the default costs.  */
1744
1745static struct table_elt *
1746insert (rtx x, struct table_elt *classp, unsigned int hash,
1747	enum machine_mode mode)
1748{
1749  return
1750    insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1751}
1752
1753
1754/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1755   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1756   the two classes equivalent.
1757
1758   CLASS1 will be the surviving class; CLASS2 should not be used after this
1759   call.
1760
1761   Any invalid entries in CLASS2 will not be copied.  */
1762
1763static void
1764merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1765{
1766  struct table_elt *elt, *next, *new_elt;
1767
1768  /* Ensure we start with the head of the classes.  */
1769  class1 = class1->first_same_value;
1770  class2 = class2->first_same_value;
1771
1772  /* If they were already equal, forget it.  */
1773  if (class1 == class2)
1774    return;
1775
1776  for (elt = class2; elt; elt = next)
1777    {
1778      unsigned int hash;
1779      rtx exp = elt->exp;
1780      enum machine_mode mode = elt->mode;
1781
1782      next = elt->next_same_value;
1783
1784      /* Remove old entry, make a new one in CLASS1's class.
1785	 Don't do this for invalid entries as we cannot find their
1786	 hash code (it also isn't necessary).  */
1787      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1788	{
1789	  bool need_rehash = false;
1790
1791	  hash_arg_in_memory = 0;
1792	  hash = HASH (exp, mode);
1793
1794	  if (REG_P (exp))
1795	    {
1796	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1797	      delete_reg_equiv (REGNO (exp));
1798	    }
1799
1800	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1801	    remove_pseudo_from_table (exp, hash);
1802	  else
1803	    remove_from_table (elt, hash);
1804
1805	  if (insert_regs (exp, class1, 0) || need_rehash)
1806	    {
1807	      rehash_using_reg (exp);
1808	      hash = HASH (exp, mode);
1809	    }
1810	  new_elt = insert (exp, class1, hash, mode);
1811	  new_elt->in_memory = hash_arg_in_memory;
1812	}
1813    }
1814}
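
/* For instance, when cse_insn discovers that a SET makes two expressions
   equal that already have distinct classes in the table, it calls this
   routine so that all their equivalents share one class from then on.  */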
1815
1816/* Flush the entire hash table.  */
1817
1818static void
1819flush_hash_table (void)
1820{
1821  int i;
1822  struct table_elt *p;
1823
1824  for (i = 0; i < HASH_SIZE; i++)
1825    for (p = table[i]; p; p = table[i])
1826      {
1827	/* Note that invalidate can remove elements
1828	   after P in the current hash chain.  */
1829	if (REG_P (p->exp))
1830	  invalidate (p->exp, VOIDmode);
1831	else
1832	  remove_from_table (p, i);
1833      }
1834}
1835
/* Data passed to check_dependence, the per-rtx function used to test
   whether a true dependence exists.  */
1837struct check_dependence_data
1838{
1839  enum machine_mode mode;
1840  rtx exp;
1841  rtx addr;
1842};
1843
1844static int
1845check_dependence (rtx *x, void *data)
1846{
1847  struct check_dependence_data *d = (struct check_dependence_data *) data;
1848  if (*x && MEM_P (*x))
1849    return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX,
1850		    		  cse_rtx_varies_p);
1851  else
1852    return 0;
1853}
1854
1855/* Remove from the hash table, or mark as invalid, all expressions whose
1856   values could be altered by storing in X.  X is a register, a subreg, or
1857   a memory reference with nonvarying address (because, when a memory
1858   reference with a varying address is stored in, all memory references are
1859   removed by invalidate_memory so specific invalidation is superfluous).
1860   FULL_MODE, if not VOIDmode, indicates that this much should be
1861   invalidated instead of just the amount indicated by the mode of X.  This
1862   is only used for bitfield stores into memory.
1863
1864   A nonvarying address may be just a register or just a symbol reference,
1865   or it may be either of those plus a numeric offset.  */
1866
1867static void
1868invalidate (rtx x, enum machine_mode full_mode)
1869{
1870  int i;
1871  struct table_elt *p;
1872  rtx addr;
1873
1874  switch (GET_CODE (x))
1875    {
1876    case REG:
1877      {
1878	/* If X is a register, dependencies on its contents are recorded
1879	   through the qty number mechanism.  Just change the qty number of
1880	   the register, mark it as invalid for expressions that refer to it,
1881	   and remove it itself.  */
1882	unsigned int regno = REGNO (x);
1883	unsigned int hash = HASH (x, GET_MODE (x));
1884
1885	/* Remove REGNO from any quantity list it might be on and indicate
1886	   that its value might have changed.  If it is a pseudo, remove its
1887	   entry from the hash table.
1888
1889	   For a hard register, we do the first two actions above for any
1890	   additional hard registers corresponding to X.  Then, if any of these
1891	   registers are in the table, we must remove any REG entries that
1892	   overlap these registers.  */
1893
1894	delete_reg_equiv (regno);
1895	REG_TICK (regno)++;
1896	SUBREG_TICKED (regno) = -1;
1897
1898	if (regno >= FIRST_PSEUDO_REGISTER)
1899	  remove_pseudo_from_table (x, hash);
1900	else
1901	  {
1902	    HOST_WIDE_INT in_table
1903	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1904	    unsigned int endregno = END_HARD_REGNO (x);
1905	    unsigned int tregno, tendregno, rn;
1906	    struct table_elt *p, *next;
1907
1908	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1909
1910	    for (rn = regno + 1; rn < endregno; rn++)
1911	      {
1912		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1913		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1914		delete_reg_equiv (rn);
1915		REG_TICK (rn)++;
1916		SUBREG_TICKED (rn) = -1;
1917	      }
1918
1919	    if (in_table)
1920	      for (hash = 0; hash < HASH_SIZE; hash++)
1921		for (p = table[hash]; p; p = next)
1922		  {
1923		    next = p->next_same_hash;
1924
1925		    if (!REG_P (p->exp)
1926			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1927		      continue;
1928
1929		    tregno = REGNO (p->exp);
1930		    tendregno = END_HARD_REGNO (p->exp);
1931		    if (tendregno > regno && tregno < endregno)
1932		      remove_from_table (p, hash);
1933		  }
1934	  }
1935      }
1936      return;
1937
1938    case SUBREG:
1939      invalidate (SUBREG_REG (x), VOIDmode);
1940      return;
1941
1942    case PARALLEL:
1943      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1944	invalidate (XVECEXP (x, 0, i), VOIDmode);
1945      return;
1946
1947    case EXPR_LIST:
1948      /* This is part of a disjoint return value; extract the location in
1949	 question ignoring the offset.  */
1950      invalidate (XEXP (x, 0), VOIDmode);
1951      return;
1952
1953    case MEM:
1954      addr = canon_rtx (get_addr (XEXP (x, 0)));
1955      /* Calculate the canonical version of X here so that
1956	 true_dependence doesn't generate new RTL for X on each call.  */
1957      x = canon_rtx (x);
1958
1959      /* Remove all hash table elements that refer to overlapping pieces of
1960	 memory.  */
1961      if (full_mode == VOIDmode)
1962	full_mode = GET_MODE (x);
1963
1964      for (i = 0; i < HASH_SIZE; i++)
1965	{
1966	  struct table_elt *next;
1967
1968	  for (p = table[i]; p; p = next)
1969	    {
1970	      next = p->next_same_hash;
1971	      if (p->in_memory)
1972		{
1973		  struct check_dependence_data d;
1974
1975		  /* Just canonicalize the expression once;
1976		     otherwise each time we call invalidate
1977		     true_dependence will canonicalize the
1978		     expression again.  */
1979		  if (!p->canon_exp)
1980		    p->canon_exp = canon_rtx (p->exp);
1981		  d.exp = x;
1982		  d.addr = addr;
1983		  d.mode = full_mode;
1984		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1985		    remove_from_table (p, i);
1986		}
1987	    }
1988	}
1989      return;
1990
1991    default:
1992      gcc_unreachable ();
1993    }
1994}
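
/* To illustrate the hard-register case above: on a target where
   (reg:DF 0) occupies hard registers 0 and 1, invalidating (reg:SI 1)
   must also remove a table entry for (reg:DF 0), since the two
   references overlap; that is what the TREGNO/TENDREGNO scan does.  */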
1995
1996/* Remove all expressions that refer to register REGNO,
1997   since they are already invalid, and we are about to
1998   mark that register valid again and don't want the old
1999   expressions to reappear as valid.  */
2000
2001static void
2002remove_invalid_refs (unsigned int regno)
2003{
2004  unsigned int i;
2005  struct table_elt *p, *next;
2006
2007  for (i = 0; i < HASH_SIZE; i++)
2008    for (p = table[i]; p; p = next)
2009      {
2010	next = p->next_same_hash;
2011	if (!REG_P (p->exp)
2012	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2013	  remove_from_table (p, i);
2014      }
2015}
2016
2017/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2018   and mode MODE.  */
2019static void
2020remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
2021			    enum machine_mode mode)
2022{
2023  unsigned int i;
2024  struct table_elt *p, *next;
2025  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2026
2027  for (i = 0; i < HASH_SIZE; i++)
2028    for (p = table[i]; p; p = next)
2029      {
2030	rtx exp = p->exp;
2031	next = p->next_same_hash;
2032
2033	if (!REG_P (exp)
2034	    && (GET_CODE (exp) != SUBREG
2035		|| !REG_P (SUBREG_REG (exp))
2036		|| REGNO (SUBREG_REG (exp)) != regno
2037		|| (((SUBREG_BYTE (exp)
2038		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2039		    && SUBREG_BYTE (exp) <= end))
2040	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2041	  remove_from_table (p, i);
2042      }
2043}
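
/* A worked instance of the overlap test, assuming SImode is 4 bytes:
   with OFFSET 4 and MODE SImode, END is 7, so an entry such as
   (subreg:SI (reg:DI REGNO) 0) (bytes 0-3) survives, while
   (subreg:SI (reg:DI REGNO) 4) (bytes 4-7) overlaps and is removed,
   as is any other expression that mentions REGNO.  */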
2044
2045/* Recompute the hash codes of any valid entries in the hash table that
2046   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2047
2048   This is called when we make a jump equivalence.  */
2049
2050static void
2051rehash_using_reg (rtx x)
2052{
2053  unsigned int i;
2054  struct table_elt *p, *next;
2055  unsigned hash;
2056
2057  if (GET_CODE (x) == SUBREG)
2058    x = SUBREG_REG (x);
2059
2060  /* If X is not a register or if the register is known not to be in any
2061     valid entries in the table, we have no work to do.  */
2062
2063  if (!REG_P (x)
2064      || REG_IN_TABLE (REGNO (x)) < 0
2065      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2066    return;
2067
2068  /* Scan all hash chains looking for valid entries that mention X.
2069     If we find one and it is in the wrong hash chain, move it.  */
2070
2071  for (i = 0; i < HASH_SIZE; i++)
2072    for (p = table[i]; p; p = next)
2073      {
2074	next = p->next_same_hash;
2075	if (reg_mentioned_p (x, p->exp)
2076	    && exp_equiv_p (p->exp, p->exp, 1, false)
2077	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2078	  {
2079	    if (p->next_same_hash)
2080	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2081
2082	    if (p->prev_same_hash)
2083	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2084	    else
2085	      table[i] = p->next_same_hash;
2086
2087	    p->next_same_hash = table[hash];
2088	    p->prev_same_hash = 0;
2089	    if (table[hash])
2090	      table[hash]->prev_same_hash = p;
2091	    table[hash] = p;
2092	  }
2093      }
2094}
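
/* This matters because a register's hash folds in its REG_QTY (see
   hash_rtx_cb below): when a new quantity is assigned to X, every valid
   entry mentioning X may now hash to a different bucket, so the scan
   above unlinks it from table[i] and splices it into table[hash].  */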
2095
2096/* Remove from the hash table any expression that is a call-clobbered
2097   register.  Also update their TICK values.  */
2098
2099static void
2100invalidate_for_call (void)
2101{
2102  unsigned int regno, endregno;
2103  unsigned int i;
2104  unsigned hash;
2105  struct table_elt *p, *next;
2106  int in_table = 0;
2107
2108  /* Go through all the hard registers.  For each that is clobbered in
2109     a CALL_INSN, remove the register from quantity chains and update
2110     reg_tick if defined.  Also see if any of these registers is currently
2111     in the table.  */
2112
2113  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2114    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2115      {
2116	delete_reg_equiv (regno);
2117	if (REG_TICK (regno) >= 0)
2118	  {
2119	    REG_TICK (regno)++;
2120	    SUBREG_TICKED (regno) = -1;
2121	  }
2122
2123	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2124      }
2125
2126  /* In the case where we have no call-clobbered hard registers in the
2127     table, we are done.  Otherwise, scan the table and remove any
2128     entry that overlaps a call-clobbered register.  */
2129
2130  if (in_table)
2131    for (hash = 0; hash < HASH_SIZE; hash++)
2132      for (p = table[hash]; p; p = next)
2133	{
2134	  next = p->next_same_hash;
2135
2136	  if (!REG_P (p->exp)
2137	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2138	    continue;
2139
2140	  regno = REGNO (p->exp);
2141	  endregno = END_HARD_REGNO (p->exp);
2142
2143	  for (i = regno; i < endregno; i++)
2144	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2145	      {
2146		remove_from_table (p, hash);
2147		break;
2148	      }
2149	}
2150}
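
/* E.g., if only hard register 0 is in regs_invalidated_by_call, a table
   entry for (reg:DI 0) spanning hard registers 0 and 1 is still removed,
   because the inner loop tests every register the entry occupies.  */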
2151
2152/* Given an expression X of type CONST,
2153   and ELT which is its table entry (or 0 if it
2154   is not in the hash table),
2155   return an alternate expression for X as a register plus integer.
2156   If none can be found, return 0.  */
2157
2158static rtx
2159use_related_value (rtx x, struct table_elt *elt)
2160{
2161  struct table_elt *relt = 0;
2162  struct table_elt *p, *q;
2163  HOST_WIDE_INT offset;
2164
2165  /* First, is there anything related known?
2166     If we have a table element, we can tell from that.
2167     Otherwise, must look it up.  */
2168
2169  if (elt != 0 && elt->related_value != 0)
2170    relt = elt;
2171  else if (elt == 0 && GET_CODE (x) == CONST)
2172    {
2173      rtx subexp = get_related_value (x);
2174      if (subexp != 0)
2175	relt = lookup (subexp,
2176		       SAFE_HASH (subexp, GET_MODE (subexp)),
2177		       GET_MODE (subexp));
2178    }
2179
2180  if (relt == 0)
2181    return 0;
2182
2183  /* Search all related table entries for one that has an
2184     equivalent register.  */
2185
2186  p = relt;
2187  while (1)
2188    {
2189      /* This loop is strange in that it is executed in two different cases.
2190	 The first is when X is already in the table.  Then it is searching
2191	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2192	 X is not in the table.  Then RELT points to a class for the related
2193	 value.
2194
	 Ensure that, whatever case we are in, we ignore classes that have
	 the same value as X.  */
2197
2198      if (rtx_equal_p (x, p->exp))
2199	q = 0;
2200      else
2201	for (q = p->first_same_value; q; q = q->next_same_value)
2202	  if (REG_P (q->exp))
2203	    break;
2204
2205      if (q)
2206	break;
2207
2208      p = p->related_value;
2209
2210      /* We went all the way around, so there is nothing to be found.
2211	 Alternatively, perhaps RELT was in the table for some other reason
2212	 and it has no related values recorded.  */
2213      if (p == relt || p == 0)
2214	break;
2215    }
2216
2217  if (q == 0)
2218    return 0;
2219
2220  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2222  return plus_constant (q->exp, offset);
2223}
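
/* A worked example: if X is (const (plus (symbol_ref "s") (const_int 12)))
   and the table holds (const (plus (symbol_ref "s") (const_int 4)))
   equivalent to some (reg R), the chain leads from "s" to that entry and
   the result is plus_constant ((reg R), 12 - 4), i.e.
   (plus (reg R) (const_int 8)).  */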
2224
2225
2226/* Hash a string.  Just add its bytes up.  */
2227static inline unsigned
2228hash_rtx_string (const char *ps)
2229{
2230  unsigned hash = 0;
2231  const unsigned char *p = (const unsigned char *) ps;
2232
2233  if (p)
2234    while (*p)
2235      hash += *p++;
2236
2237  return hash;
2238}
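
/* E.g., hash_rtx_string ("ba") and hash_rtx_string ("ab") collide; that
   is harmless, since a hash here only selects a bucket and never decides
   equality.  */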
2239
/* Same as hash_rtx, but call CB, if it is not NULL, on each rtx.
   When the callback returns true, we continue hashing with the new rtx.  */
2242
2243unsigned
2244hash_rtx_cb (const_rtx x, enum machine_mode mode,
2245             int *do_not_record_p, int *hash_arg_in_memory_p,
2246             bool have_reg_qty, hash_rtx_callback_function cb)
2247{
2248  int i, j;
2249  unsigned hash = 0;
2250  enum rtx_code code;
2251  const char *fmt;
2252  enum machine_mode newmode;
2253  rtx newx;
2254
2255  /* Used to turn recursion into iteration.  We can't rely on GCC's
2256     tail-recursion elimination since we need to keep accumulating values
2257     in HASH.  */
2258 repeat:
2259  if (x == 0)
2260    return hash;
2261
2262  /* Invoke the callback first.  */
2263  if (cb != NULL
2264      && ((*cb) (x, mode, &newx, &newmode)))
2265    {
2266      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2267                           hash_arg_in_memory_p, have_reg_qty, cb);
2268      return hash;
2269    }
2270
2271  code = GET_CODE (x);
2272  switch (code)
2273    {
2274    case REG:
2275      {
2276	unsigned int regno = REGNO (x);
2277
2278	if (do_not_record_p && !reload_completed)
2279	  {
2280	    /* On some machines, we can't record any non-fixed hard register,
2281	       because extending its life will cause reload problems.  We
2282	       consider ap, fp, sp, gp to be fixed for this purpose.
2283
2284	       We also consider CCmode registers to be fixed for this purpose;
2285	       failure to do so leads to failure to simplify 0<100 type of
2286	       conditionals.
2287
2288	       On all machines, we can't record any global registers.
2289	       Nor should we record any register that is in a small
2290	       class, as defined by CLASS_LIKELY_SPILLED_P.  */
2291	    bool record;
2292
2293	    if (regno >= FIRST_PSEUDO_REGISTER)
2294	      record = true;
2295	    else if (x == frame_pointer_rtx
2296		     || x == hard_frame_pointer_rtx
2297		     || x == arg_pointer_rtx
2298		     || x == stack_pointer_rtx
2299		     || x == pic_offset_table_rtx)
2300	      record = true;
2301	    else if (global_regs[regno])
2302	      record = false;
2303	    else if (fixed_regs[regno])
2304	      record = true;
2305	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2306	      record = true;
2307	    else if (SMALL_REGISTER_CLASSES)
2308	      record = false;
2309	    else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2310	      record = false;
2311	    else
2312	      record = true;
2313
2314	    if (!record)
2315	      {
2316		*do_not_record_p = 1;
2317		return 0;
2318	      }
2319	  }
2320
2321	hash += ((unsigned int) REG << 7);
2322        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2323	return hash;
2324      }
2325
2326    /* We handle SUBREG of a REG specially because the underlying
2327       reg changes its hash value with every value change; we don't
2328       want to have to forget unrelated subregs when one subreg changes.  */
2329    case SUBREG:
2330      {
2331	if (REG_P (SUBREG_REG (x)))
2332	  {
2333	    hash += (((unsigned int) SUBREG << 7)
2334		     + REGNO (SUBREG_REG (x))
2335		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2336	    return hash;
2337	  }
2338	break;
2339      }
2340
2341    case CONST_INT:
2342      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2343               + (unsigned int) INTVAL (x));
2344      return hash;
2345
2346    case CONST_DOUBLE:
2347      /* This is like the general case, except that it only counts
2348	 the integers representing the constant.  */
2349      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2350      if (GET_MODE (x) != VOIDmode)
2351	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2352      else
2353	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2354		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2355      return hash;
2356
2357    case CONST_FIXED:
2358      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2359      hash += fixed_hash (CONST_FIXED_VALUE (x));
2360      return hash;
2361
2362    case CONST_VECTOR:
2363      {
2364	int units;
2365	rtx elt;
2366
2367	units = CONST_VECTOR_NUNITS (x);
2368
2369	for (i = 0; i < units; ++i)
2370	  {
2371	    elt = CONST_VECTOR_ELT (x, i);
2372	    hash += hash_rtx_cb (elt, GET_MODE (elt),
2373                                 do_not_record_p, hash_arg_in_memory_p,
2374                                 have_reg_qty, cb);
2375	  }
2376
2377	return hash;
2378      }
2379
2380      /* Assume there is only one rtx object for any given label.  */
2381    case LABEL_REF:
2382      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2383	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
2386      return hash;
2387
2388    case SYMBOL_REF:
2389      {
2390	/* Don't hash on the symbol's address to avoid bootstrap differences.
2391	   Different hash values may cause expressions to be recorded in
2392	   different orders and thus different registers to be used in the
2393	   final assembler.  This also avoids differences in the dump files
2394	   between various stages.  */
2395	unsigned int h = 0;
2396	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2397
2398	while (*p)
2399	  h += (h << 7) + *p++; /* ??? revisit */
2400
2401	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2402	return hash;
2403      }
2404
2405    case MEM:
2406      /* We don't record if marked volatile or if BLKmode since we don't
2407	 know the size of the move.  */
2408      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2409	{
2410	  *do_not_record_p = 1;
2411	  return 0;
2412	}
2413      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2414	*hash_arg_in_memory_p = 1;
2415
2416      /* Now that we have already found this special case,
2417	 might as well speed it up as much as possible.  */
2418      hash += (unsigned) MEM;
2419      x = XEXP (x, 0);
2420      goto repeat;
2421
2422    case USE:
2423      /* A USE that mentions non-volatile memory needs special
2424	 handling since the MEM may be BLKmode which normally
2425	 prevents an entry from being made.  Pure calls are
2426	 marked by a USE which mentions BLKmode memory.
2427	 See calls.c:emit_call_1.  */
2428      if (MEM_P (XEXP (x, 0))
2429	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2430	{
2431	  hash += (unsigned) USE;
2432	  x = XEXP (x, 0);
2433
2434	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2435	    *hash_arg_in_memory_p = 1;
2436
2437	  /* Now that we have already found this special case,
2438	     might as well speed it up as much as possible.  */
2439	  hash += (unsigned) MEM;
2440	  x = XEXP (x, 0);
2441	  goto repeat;
2442	}
2443      break;
2444
2445    case PRE_DEC:
2446    case PRE_INC:
2447    case POST_DEC:
2448    case POST_INC:
2449    case PRE_MODIFY:
2450    case POST_MODIFY:
2451    case PC:
2452    case CC0:
2453    case CALL:
2454    case UNSPEC_VOLATILE:
      if (do_not_record_p)
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	return hash;
2462
2463    case ASM_OPERANDS:
2464      if (do_not_record_p && MEM_VOLATILE_P (x))
2465	{
2466	  *do_not_record_p = 1;
2467	  return 0;
2468	}
2469      else
2470	{
2471	  /* We don't want to take the filename and line into account.  */
2472	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2473	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2474	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2475	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2476
2477	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2478	    {
2479	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2480		{
2481		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2482                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2483                                        do_not_record_p, hash_arg_in_memory_p,
2484                                        have_reg_qty, cb)
2485			   + hash_rtx_string
2486                           (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2487		}
2488
2489	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2490	      x = ASM_OPERANDS_INPUT (x, 0);
2491	      mode = GET_MODE (x);
2492	      goto repeat;
2493	    }
2494
2495	  return hash;
2496	}
2497      break;
2498
2499    default:
2500      break;
2501    }
2502
2503  i = GET_RTX_LENGTH (code) - 1;
2504  hash += (unsigned) code + (unsigned) GET_MODE (x);
2505  fmt = GET_RTX_FORMAT (code);
2506  for (; i >= 0; i--)
2507    {
2508      switch (fmt[i])
2509	{
2510	case 'e':
2511	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called often enough to be worth it.  */
2514	  if (i == 0)
2515	    {
2516	      x = XEXP (x, i);
2517	      goto repeat;
2518	    }
2519
2520	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2521                               hash_arg_in_memory_p,
2522                               have_reg_qty, cb);
2523	  break;
2524
2525	case 'E':
2526	  for (j = 0; j < XVECLEN (x, i); j++)
2527	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2528                                 hash_arg_in_memory_p,
2529                                 have_reg_qty, cb);
2530	  break;
2531
2532	case 's':
2533	  hash += hash_rtx_string (XSTR (x, i));
2534	  break;
2535
2536	case 'i':
2537	  hash += (unsigned int) XINT (x, i);
2538	  break;
2539
2540	case '0': case 't':
2541	  /* Unused.  */
2542	  break;
2543
2544	default:
2545	  gcc_unreachable ();
2546	}
2547    }
2548
2549  return hash;
2550}
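
/* To make the accumulation above concrete: hashing
   (plus:SI (reg:SI 100) (const_int 4)) first adds
   (unsigned) PLUS + (unsigned) SImode, then hashes the operands from
   last to first, so the CONST_INT contributes
   ((unsigned) CONST_INT << 7) + 4 (its MODE argument is VOIDmode here)
   and the REG contributes ((unsigned) REG << 7) + REG_QTY (100) when
   HAVE_REG_QTY.  */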
2551
2552/* Hash an rtx.  We are careful to make sure the value is never negative.
2553   Equivalent registers hash identically.
2554   MODE is used in hashing for CONST_INTs only;
2555   otherwise the mode of X is used.
2556
2557   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2558
   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which is not marked MEM_READONLY_P.
2561
2562   Note that cse_insn knows that the hash code of a MEM expression
2563   is just (int) MEM plus the hash code of the address.  */
2564
2565unsigned
2566hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
2567	  int *hash_arg_in_memory_p, bool have_reg_qty)
2568{
2569  return hash_rtx_cb (x, mode, do_not_record_p,
2570                      hash_arg_in_memory_p, have_reg_qty, NULL);
2571}
2572
2573/* Hash an rtx X for cse via hash_rtx.
2574   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   is not marked MEM_READONLY_P.  */
2577
2578static inline unsigned
2579canon_hash (rtx x, enum machine_mode mode)
2580{
2581  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2582}
2583
2584/* Like canon_hash but with no side effects, i.e. do_not_record
2585   and hash_arg_in_memory are not changed.  */
2586
2587static inline unsigned
2588safe_hash (rtx x, enum machine_mode mode)
2589{
2590  int dummy_do_not_record;
2591  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2592}
2593
2594/* Return 1 iff X and Y would canonicalize into the same thing,
2595   without actually constructing the canonicalization of either one.
2596   If VALIDATE is nonzero,
2597   we assume X is an expression being processed from the rtl
2598   and Y was found in the hash table.  We check register refs
2599   in Y for being marked as valid.
2600
2601   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2602
2603int
2604exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2605{
2606  int i, j;
2607  enum rtx_code code;
2608  const char *fmt;
2609
2610  /* Note: it is incorrect to assume an expression is equivalent to itself
2611     if VALIDATE is nonzero.  */
2612  if (x == y && !validate)
2613    return 1;
2614
2615  if (x == 0 || y == 0)
2616    return x == y;
2617
2618  code = GET_CODE (x);
2619  if (code != GET_CODE (y))
2620    return 0;
2621
2622  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2623  if (GET_MODE (x) != GET_MODE (y))
2624    return 0;
2625
  /* MEMs referring to different address spaces are not equivalent.  */
2627  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2628    return 0;
2629
2630  switch (code)
2631    {
2632    case PC:
2633    case CC0:
2634    case CONST_INT:
2635    case CONST_DOUBLE:
2636    case CONST_FIXED:
2637      return x == y;
2638
2639    case LABEL_REF:
2640      return XEXP (x, 0) == XEXP (y, 0);
2641
2642    case SYMBOL_REF:
2643      return XSTR (x, 0) == XSTR (y, 0);
2644
2645    case REG:
2646      if (for_gcse)
2647	return REGNO (x) == REGNO (y);
2648      else
2649	{
2650	  unsigned int regno = REGNO (y);
2651	  unsigned int i;
2652	  unsigned int endregno = END_REGNO (y);
2653
	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If they are and we are not to validate, they
	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2657
2658	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2659	    return 0;
2660
2661	  if (! validate)
2662	    return 1;
2663
2664	  for (i = regno; i < endregno; i++)
2665	    if (REG_IN_TABLE (i) != REG_TICK (i))
2666	      return 0;
2667
2668	  return 1;
2669	}
2670
2671    case MEM:
2672      if (for_gcse)
2673	{
2674	  /* A volatile mem should not be considered equivalent to any
2675	     other.  */
2676	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2677	    return 0;
2678
2679	  /* Can't merge two expressions in different alias sets, since we
2680	     can decide that the expression is transparent in a block when
2681	     it isn't, due to it being set with the different alias set.
2682
2683	     Also, can't merge two expressions with different MEM_ATTRS.
2684	     They could e.g. be two different entities allocated into the
2685	     same space on the stack (see e.g. PR25130).  In that case, the
2686	     MEM addresses can be the same, even though the two MEMs are
2687	     absolutely not equivalent.
2688
2689	     But because really all MEM attributes should be the same for
2690	     equivalent MEMs, we just use the invariant that MEMs that have
2691	     the same attributes share the same mem_attrs data structure.  */
2692	  if (MEM_ATTRS (x) != MEM_ATTRS (y))
2693	    return 0;
2694	}
2695      break;
2696
    /* For commutative operations, check both orders.  */
2698    case PLUS:
2699    case MULT:
2700    case AND:
2701    case IOR:
2702    case XOR:
2703    case NE:
2704    case EQ:
2705      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2706			     validate, for_gcse)
2707	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2708				validate, for_gcse))
2709	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2710				validate, for_gcse)
2711		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2712				   validate, for_gcse)));
2713
2714    case ASM_OPERANDS:
2715      /* We don't use the generic code below because we want to
2716	 disregard filename and line numbers.  */
2717
2718      /* A volatile asm isn't equivalent to any other.  */
2719      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2720	return 0;
2721
2722      if (GET_MODE (x) != GET_MODE (y)
2723	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2724	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2725		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2726	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2727	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2728	return 0;
2729
2730      if (ASM_OPERANDS_INPUT_LENGTH (x))
2731	{
2732	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2733	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2734			       ASM_OPERANDS_INPUT (y, i),
2735			       validate, for_gcse)
2736		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2737			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2738	      return 0;
2739	}
2740
2741      return 1;
2742
2743    default:
2744      break;
2745    }
2746
2747  /* Compare the elements.  If any pair of corresponding elements
2748     fail to match, return 0 for the whole thing.  */
2749
2750  fmt = GET_RTX_FORMAT (code);
2751  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2752    {
2753      switch (fmt[i])
2754	{
2755	case 'e':
2756	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2757			      validate, for_gcse))
2758	    return 0;
2759	  break;
2760
2761	case 'E':
2762	  if (XVECLEN (x, i) != XVECLEN (y, i))
2763	    return 0;
2764	  for (j = 0; j < XVECLEN (x, i); j++)
2765	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2766				validate, for_gcse))
2767	      return 0;
2768	  break;
2769
2770	case 's':
2771	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2772	    return 0;
2773	  break;
2774
2775	case 'i':
2776	  if (XINT (x, i) != XINT (y, i))
2777	    return 0;
2778	  break;
2779
2780	case 'w':
2781	  if (XWINT (x, i) != XWINT (y, i))
2782	    return 0;
2783	  break;
2784
2785	case '0':
2786	case 't':
2787	  break;
2788
2789	default:
2790	  gcc_unreachable ();
2791	}
2792    }
2793
2794  return 1;
2795}
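
/* E.g., (plus:SI (reg 1) (reg 2)) and (plus:SI (reg 2) (reg 1)) are
   matched by the commutative case above, whereas (mult:SI x y) never
   matches (mult:HI x y) because their modes differ.  */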
2796
2797/* Return 1 if X has a value that can vary even between two
2798   executions of the program.  0 means X can be compared reliably
2799   against certain constants or near-constants.  */
2800
2801static bool
2802cse_rtx_varies_p (const_rtx x, bool from_alias)
2803{
2804  /* We need not check for X and the equivalence class being of the same
2805     mode because if X is equivalent to a constant in some mode, it
2806     doesn't vary in any mode.  */
2807
2808  if (REG_P (x)
2809      && REGNO_QTY_VALID_P (REGNO (x)))
2810    {
2811      int x_q = REG_QTY (REGNO (x));
2812      struct qty_table_elem *x_ent = &qty_table[x_q];
2813
2814      if (GET_MODE (x) == x_ent->mode
2815	  && x_ent->const_rtx != NULL_RTX)
2816	return 0;
2817    }
2818
2819  if (GET_CODE (x) == PLUS
2820      && CONST_INT_P (XEXP (x, 1))
2821      && REG_P (XEXP (x, 0))
2822      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2823    {
2824      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2825      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2826
2827      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2828	  && x0_ent->const_rtx != NULL_RTX)
2829	return 0;
2830    }
2831
2832  /* This can happen as the result of virtual register instantiation, if
2833     the initial constant is too large to be a valid address.  This gives
2834     us a three instruction sequence, load large offset into a register,
2835     load fp minus a constant into a register, then a MEM which is the
2836     sum of the two `constant' registers.  */
2837  if (GET_CODE (x) == PLUS
2838      && REG_P (XEXP (x, 0))
2839      && REG_P (XEXP (x, 1))
2840      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2841      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2842    {
2843      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2844      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2845      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2846      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2847
2848      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2849	  && x0_ent->const_rtx != NULL_RTX
2850	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2851	  && x1_ent->const_rtx != NULL_RTX)
2852	return 0;
2853    }
2854
2855  return rtx_varies_p (x, from_alias);
2856}
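
/* For illustration: after virtual register instantiation we may see
   (plus (reg A) (reg B)) where A is equivalent to fp plus a constant
   and B to a large offset; the last test above then lets the sum be
   treated as nonvarying even though neither operand is literally
   constant.  */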
2857
2858/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2859   the result if necessary.  INSN is as for canon_reg.  */
2860
2861static void
2862validate_canon_reg (rtx *xloc, rtx insn)
2863{
2864  if (*xloc)
2865    {
2866      rtx new_rtx = canon_reg (*xloc, insn);
2867
2868      /* If replacing pseudo with hard reg or vice versa, ensure the
2869         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2870      gcc_assert (insn && new_rtx);
2871      validate_change (insn, xloc, new_rtx, 1);
2872    }
2873}
2874
2875/* Canonicalize an expression:
2876   replace each register reference inside it
2877   with the "oldest" equivalent register.
2878
2879   If INSN is nonzero validate_change is used to ensure that INSN remains valid
2880   after we make our substitution.  The calls are made with IN_GROUP nonzero
2881   so apply_change_group must be called upon the outermost return from this
2882   function (unless INSN is zero).  The result of apply_change_group can
2883   generally be discarded since the changes we are making are optional.  */
2884
2885static rtx
2886canon_reg (rtx x, rtx insn)
2887{
2888  int i;
2889  enum rtx_code code;
2890  const char *fmt;
2891
2892  if (x == 0)
2893    return x;
2894
2895  code = GET_CODE (x);
2896  switch (code)
2897    {
2898    case PC:
2899    case CC0:
2900    case CONST:
2901    case CONST_INT:
2902    case CONST_DOUBLE:
2903    case CONST_FIXED:
2904    case CONST_VECTOR:
2905    case SYMBOL_REF:
2906    case LABEL_REF:
2907    case ADDR_VEC:
2908    case ADDR_DIFF_VEC:
2909      return x;
2910
2911    case REG:
2912      {
2913	int first;
2914	int q;
2915	struct qty_table_elem *ent;
2916
2917	/* Never replace a hard reg, because hard regs can appear
2918	   in more than one machine mode, and we must preserve the mode
2919	   of each occurrence.  Also, some hard regs appear in
2920	   MEMs that are shared and mustn't be altered.  Don't try to
2921	   replace any reg that maps to a reg of class NO_REGS.  */
2922	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2923	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2924	  return x;
2925
2926	q = REG_QTY (REGNO (x));
2927	ent = &qty_table[q];
2928	first = ent->first_reg;
2929	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2930		: REGNO_REG_CLASS (first) == NO_REGS ? x
2931		: gen_rtx_REG (ent->mode, first));
2932      }
2933
2934    default:
2935      break;
2936    }
2937
2938  fmt = GET_RTX_FORMAT (code);
2939  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2940    {
2941      int j;
2942
2943      if (fmt[i] == 'e')
2944	validate_canon_reg (&XEXP (x, i), insn);
2945      else if (fmt[i] == 'E')
2946	for (j = 0; j < XVECLEN (x, i); j++)
2947	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2948    }
2949
2950  return x;
2951}
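
/* E.g., if (reg 105) and (reg 100) share a quantity whose first_reg
   is 100, a use of (reg 105) is rewritten as (reg 100), the "oldest"
   equivalent register.  */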
2952
2953/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   find what values are actually being compared.
2956
2957   *PARG1 and *PARG2 are updated to contain the rtx representing the values
2958   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
2959   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2960   compared to produce cc0.
2961
   The return value is the comparison operator and is either CODE itself
   or the code corresponding to the inverse of the comparison.  */
2964
2965static enum rtx_code
2966find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2967		      enum machine_mode *pmode1, enum machine_mode *pmode2)
2968{
2969  rtx arg1, arg2;
2970
2971  arg1 = *parg1, arg2 = *parg2;
2972
2973  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
2974
2975  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2976    {
2977      /* Set nonzero when we find something of interest.  */
2978      rtx x = 0;
2979      int reverse_code = 0;
2980      struct table_elt *p = 0;
2981
2982      /* If arg1 is a COMPARE, extract the comparison arguments from it.
2983	 On machines with CC0, this is the only case that can occur, since
2984	 fold_rtx will return the COMPARE or item being compared with zero
2985	 when given CC0.  */
2986
2987      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2988	x = arg1;
2989
2990      /* If ARG1 is a comparison operator and CODE is testing for
2991	 STORE_FLAG_VALUE, get the inner arguments.  */
2992
2993      else if (COMPARISON_P (arg1))
2994	{
2995#ifdef FLOAT_STORE_FLAG_VALUE
2996	  REAL_VALUE_TYPE fsfv;
2997#endif
2998
2999	  if (code == NE
3000	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3001		  && code == LT && STORE_FLAG_VALUE == -1)
3002#ifdef FLOAT_STORE_FLAG_VALUE
3003	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3004		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3005		      REAL_VALUE_NEGATIVE (fsfv)))
3006#endif
3007	      )
3008	    x = arg1;
3009	  else if (code == EQ
3010		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3011		       && code == GE && STORE_FLAG_VALUE == -1)
3012#ifdef FLOAT_STORE_FLAG_VALUE
3013		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3014		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3015			   REAL_VALUE_NEGATIVE (fsfv)))
3016#endif
3017		   )
3018	    x = arg1, reverse_code = 1;
3019	}
3020
3021      /* ??? We could also check for
3022
3023	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3024
3025	 and related forms, but let's wait until we see them occurring.  */
3026
3027      if (x == 0)
3028	/* Look up ARG1 in the hash table and see if it has an equivalence
3029	   that lets us see what is being compared.  */
3030	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3031      if (p)
3032	{
3033	  p = p->first_same_value;
3034
3035	  /* If what we compare is already known to be constant, that is as
3036	     good as it gets.
3037	     We need to break the loop in this case, because otherwise we
3038	     can have an infinite loop when looking at a reg that is known
3039	     to be a constant which is the same as a comparison of a reg
3040	     against zero which appears later in the insn stream, which in
3041	     turn is constant and the same as the comparison of the first reg
3042	     against zero...  */
3043	  if (p->is_const)
3044	    break;
3045	}
3046
3047      for (; p; p = p->next_same_value)
3048	{
3049	  enum machine_mode inner_mode = GET_MODE (p->exp);
3050#ifdef FLOAT_STORE_FLAG_VALUE
3051	  REAL_VALUE_TYPE fsfv;
3052#endif
3053
3054	  /* If the entry isn't valid, skip it.  */
3055	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3056	    continue;
3057
3058	  if (GET_CODE (p->exp) == COMPARE
3059	      /* Another possibility is that this machine has a compare insn
3060		 that includes the comparison code.  In that case, ARG1 would
3061		 be equivalent to a comparison operation that would set ARG1 to
3062		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3063		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machines with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3066	      || ((code == NE
3067		   || (code == LT
3068		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3069		       && (GET_MODE_BITSIZE (inner_mode)
3070			   <= HOST_BITS_PER_WIDE_INT)
3071		       && (STORE_FLAG_VALUE
3072			   & ((HOST_WIDE_INT) 1
3073			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3074#ifdef FLOAT_STORE_FLAG_VALUE
3075		   || (code == LT
3076		       && SCALAR_FLOAT_MODE_P (inner_mode)
3077		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3078			   REAL_VALUE_NEGATIVE (fsfv)))
3079#endif
3080		   )
3081		  && COMPARISON_P (p->exp)))
3082	    {
3083	      x = p->exp;
3084	      break;
3085	    }
3086	  else if ((code == EQ
3087		    || (code == GE
3088			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3089			&& (GET_MODE_BITSIZE (inner_mode)
3090			    <= HOST_BITS_PER_WIDE_INT)
3091			&& (STORE_FLAG_VALUE
3092			    & ((HOST_WIDE_INT) 1
3093			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3094#ifdef FLOAT_STORE_FLAG_VALUE
3095		    || (code == GE
3096			&& SCALAR_FLOAT_MODE_P (inner_mode)
3097			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3098			    REAL_VALUE_NEGATIVE (fsfv)))
3099#endif
3100		    )
3101		   && COMPARISON_P (p->exp))
3102	    {
3103	      reverse_code = 1;
3104	      x = p->exp;
3105	      break;
3106	    }
3107
	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
3111	  else if (!rtx_addr_can_trap_p (p->exp))
3112	    {
3113	      arg1 = p->exp;
3114	      continue;
3115	    }
3116	}
3117
3118      /* If we didn't find a useful equivalence for ARG1, we are done.
3119	 Otherwise, set up for the next iteration.  */
3120      if (x == 0)
3121	break;
3122
      /* If we need to reverse the comparison, make sure that this is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
3126      if (reverse_code)
3127	{
3128	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3129	  if (reversed == UNKNOWN)
3130	    break;
3131	  else
3132	    code = reversed;
3133	}
3134      else if (COMPARISON_P (x))
3135	code = GET_CODE (x);
3136      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3137    }
3138
  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it would be
     too late to recover the operands' original modes.  */
3141  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3142  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3143
3144  return code;
3145}
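
/* A worked example: given (lt (reg 100) (const_int 0)) where the table
   shows (reg 100) equivalent to (compare (reg 101) (reg 102)), the loop
   above rewrites the arguments so *PARG1 and *PARG2 become (reg 101)
   and (reg 102), still returning LT.  */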
3146
3147/* If X is a nontrivial arithmetic operation on an argument for which
3148   a constant value can be determined, return the result of operating
3149   on that value, as a constant.  Otherwise, return X, possibly with
3150   one or more operands changed to a forward-propagated constant.
3151
3152   If X is a register whose contents are known, we do NOT return
3153   those contents here; equiv_constant is called to perform that task.
3154   For SUBREGs and MEMs, we do that both here and in equiv_constant.
3155
3156   INSN is the insn that we may be modifying.  If it is 0, make a copy
3157   of X before modifying it.  */
3158
3159static rtx
3160fold_rtx (rtx x, rtx insn)
3161{
3162  enum rtx_code code;
3163  enum machine_mode mode;
3164  const char *fmt;
3165  int i;
3166  rtx new_rtx = 0;
3167  int changed = 0;
3168
3169  /* Operands of X.  */
3170  rtx folded_arg0;
3171  rtx folded_arg1;
3172
3173  /* Constant equivalents of first three operands of X;
3174     0 when no such equivalent is known.  */
3175  rtx const_arg0;
3176  rtx const_arg1;
3177  rtx const_arg2;
3178
3179  /* The mode of the first operand of X.  We need this for sign and zero
3180     extends.  */
3181  enum machine_mode mode_arg0;
3182
3183  if (x == 0)
3184    return x;
3185
3186  /* Try to perform some initial simplifications on X.  */
3187  code = GET_CODE (x);
3188  switch (code)
3189    {
3190    case MEM:
3191    case SUBREG:
3192      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3193        return new_rtx;
3194      return x;
3195
3196    case CONST:
3197    case CONST_INT:
3198    case CONST_DOUBLE:
3199    case CONST_FIXED:
3200    case CONST_VECTOR:
3201    case SYMBOL_REF:
3202    case LABEL_REF:
3203    case REG:
3204    case PC:
3205      /* No use simplifying an EXPR_LIST
3206	 since they are used only for lists of args
3207	 in a function call's REG_EQUAL note.  */
3208    case EXPR_LIST:
3209      return x;
3210
3211#ifdef HAVE_cc0
3212    case CC0:
3213      return prev_insn_cc0;
3214#endif
3215
3216    case ASM_OPERANDS:
3217      if (insn)
3218	{
3219	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3220	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3221			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3222	}
3223      return x;
3224
3225#ifdef NO_FUNCTION_CSE
3226    case CALL:
3227      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3228	return x;
3229      break;
3230#endif
3231
3232    /* Anything else goes through the loop below.  */
3233    default:
3234      break;
3235    }
3236
3237  mode = GET_MODE (x);
3238  const_arg0 = 0;
3239  const_arg1 = 0;
3240  const_arg2 = 0;
3241  mode_arg0 = VOIDmode;
3242
3243  /* Try folding our operands.
3244     Then see which ones have constant values known.  */
3245
3246  fmt = GET_RTX_FORMAT (code);
3247  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3248    if (fmt[i] == 'e')
3249      {
3250	rtx folded_arg = XEXP (x, i), const_arg;
3251	enum machine_mode mode_arg = GET_MODE (folded_arg);
3252
3253	switch (GET_CODE (folded_arg))
3254	  {
3255	  case MEM:
3256	  case REG:
3257	  case SUBREG:
3258	    const_arg = equiv_constant (folded_arg);
3259	    break;
3260
3261	  case CONST:
3262	  case CONST_INT:
3263	  case SYMBOL_REF:
3264	  case LABEL_REF:
3265	  case CONST_DOUBLE:
3266	  case CONST_FIXED:
3267	  case CONST_VECTOR:
3268	    const_arg = folded_arg;
3269	    break;
3270
3271#ifdef HAVE_cc0
3272	  case CC0:
3273	    folded_arg = prev_insn_cc0;
3274	    mode_arg = prev_insn_cc0_mode;
3275	    const_arg = equiv_constant (folded_arg);
3276	    break;
3277#endif
3278
3279	  default:
3280	    folded_arg = fold_rtx (folded_arg, insn);
3281	    const_arg = equiv_constant (folded_arg);
3282	    break;
3283	  }
3284
3285	/* For the first three operands, see if the operand
3286	   is constant or equivalent to a constant.  */
3287	switch (i)
3288	  {
3289	  case 0:
3290	    folded_arg0 = folded_arg;
3291	    const_arg0 = const_arg;
3292	    mode_arg0 = mode_arg;
3293	    break;
3294	  case 1:
3295	    folded_arg1 = folded_arg;
3296	    const_arg1 = const_arg;
3297	    break;
3298	  case 2:
3299	    const_arg2 = const_arg;
3300	    break;
3301	  }
3302
3303	/* Pick the least expensive of the argument and an equivalent constant
3304	   argument.  */
3305	if (const_arg != 0
3306	    && const_arg != folded_arg
3307	    && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)
3308
3309	    /* It's not safe to substitute the operand of a conversion
3310	       operator with a constant, as the conversion's identity
3311	       depends upon the mode of its operand.  This optimization
3312	       is handled by the call to simplify_unary_operation.  */
3313	    && (GET_RTX_CLASS (code) != RTX_UNARY
3314		|| GET_MODE (const_arg) == mode_arg0
3315		|| (code != ZERO_EXTEND
3316		    && code != SIGN_EXTEND
3317		    && code != TRUNCATE
3318		    && code != FLOAT_TRUNCATE
3319		    && code != FLOAT_EXTEND
3320		    && code != FLOAT
3321		    && code != FIX
3322		    && code != UNSIGNED_FLOAT
3323		    && code != UNSIGNED_FIX)))
3324	  folded_arg = const_arg;
3325
3326	if (folded_arg == XEXP (x, i))
3327	  continue;
3328
3329	if (insn == NULL_RTX && !changed)
3330	  x = copy_rtx (x);
3331	changed = 1;
3332	validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3333      }
3334
3335  if (changed)
3336    {
3337      /* Canonicalize X if necessary, and keep const_argN and folded_argN
3338	 consistent with the order in X.  */
3339      if (canonicalize_change_group (insn, x))
3340	{
3341	  rtx tem;
3342	  tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3343	  tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3344	}
3345
3346      apply_change_group ();
3347    }
3348
3349  /* If X is an arithmetic operation, see if we can simplify it.  */
3350
3351  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
	/* We can't simplify extension ops unless we know the
	   original mode.  */
	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
	    && mode_arg0 == VOIDmode)
	  break;

	new_rtx = simplify_unary_operation (code, mode,
					const_arg0 ? const_arg0 : folded_arg0,
					mode_arg0);
      }
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* See what items are actually being compared and set FOLDED_ARG[01]
	 to those values and CODE to the actual comparison code.  If any are
	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
	 do anything if both operands are already known to be constant.  */

      /* ??? Vector mode comparisons are not supported yet.  */
      if (VECTOR_MODE_P (mode))
	break;

      if (const_arg0 == 0 || const_arg1 == 0)
	{
	  struct table_elt *p0, *p1;
	  rtx true_rtx, false_rtx;
	  enum machine_mode mode_arg1;

	  if (SCALAR_FLOAT_MODE_P (mode))
	    {
#ifdef FLOAT_STORE_FLAG_VALUE
	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
#else
	      true_rtx = NULL_RTX;
#endif
	      false_rtx = CONST0_RTX (mode);
	    }
	  else
	    {
	      true_rtx = const_true_rtx;
	      false_rtx = const0_rtx;
	    }

	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
				       &mode_arg0, &mode_arg1);

	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
	     what kinds of things are being compared, so we can't do
	     anything with this comparison.  */

	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
	    break;

	  const_arg0 = equiv_constant (folded_arg0);
	  const_arg1 = equiv_constant (folded_arg1);

	  /* If we do not now have two constants being compared, see
	     if we can nevertheless deduce some things about the
	     comparison.  */
	  if (const_arg0 == 0 || const_arg1 == 0)
	    {
	      if (const_arg1 != NULL)
		{
		  rtx cheapest_simplification;
		  int cheapest_cost;
		  rtx simp_result;
		  struct table_elt *p;

		  /* See if we can find an equivalent of folded_arg0
		     that gets us a cheaper expression, possibly a
		     constant through simplifications.  */
		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
			      mode_arg0);

		  if (p != NULL)
		    {
		      cheapest_simplification = x;
		      cheapest_cost = COST (x);

		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
			{
			  int cost;

			  /* If the entry isn't valid, skip it.  */
			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
			    continue;

			  /* Try to simplify using this equivalence.  */
			  simp_result
			    = simplify_relational_operation (code, mode,
							     mode_arg0,
							     p->exp,
							     const_arg1);

			  if (simp_result == NULL)
			    continue;

			  cost = COST (simp_result);
			  if (cost < cheapest_cost)
			    {
			      cheapest_cost = cost;
			      cheapest_simplification = simp_result;
			    }
			}

		      /* If we have a cheaper expression now, use that
			 and try folding it further, from the top.  */
		      if (cheapest_simplification != x)
			return fold_rtx (copy_rtx (cheapest_simplification),
					 insn);
		    }
		}

	      /* See if the two operands are the same.  */

	      if ((REG_P (folded_arg0)
		   && REG_P (folded_arg1)
		   && (REG_QTY (REGNO (folded_arg0))
		       == REG_QTY (REGNO (folded_arg1))))
		  || ((p0 = lookup (folded_arg0,
				    SAFE_HASH (folded_arg0, mode_arg0),
				    mode_arg0))
		      && (p1 = lookup (folded_arg1,
				       SAFE_HASH (folded_arg1, mode_arg0),
				       mode_arg0))
		      && p0->first_same_value == p1->first_same_value))
		folded_arg1 = folded_arg0;

	      /* If FOLDED_ARG0 is a register, see if the comparison we are
		 doing now is either the same as we did before or the reverse
		 (we only check the reverse if not floating-point).  */
	      else if (REG_P (folded_arg0))
		{
		  int qty = REG_QTY (REGNO (folded_arg0));

		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
		    {
		      struct qty_table_elem *ent = &qty_table[qty];

		      if ((comparison_dominates_p (ent->comparison_code, code)
			   || (! FLOAT_MODE_P (mode_arg0)
			       && comparison_dominates_p (ent->comparison_code,
							  reverse_condition (code))))
			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
			      || (const_arg1
				  && rtx_equal_p (ent->comparison_const,
						  const_arg1))
			      || (REG_P (folded_arg1)
				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
			{
			  if (comparison_dominates_p (ent->comparison_code, code))
			    {
			      if (true_rtx)
				return true_rtx;
			      else
				break;
			    }
			  else
			    return false_rtx;
			}
		    }
		}
	    }
	}

      /* If we are comparing against zero, see if the first operand is
	 equivalent to an IOR with a constant.  If so, we may be able to
	 determine the result of this comparison.  */
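      /* For instance, if FOLDED_ARG0 is known to be equivalent to
	 (ior:SI X (const_int 4)), it cannot be zero, so an EQ or NE
	 test against zero can be decided by the simplifier below.
	 (The operands here are illustrative only.)  */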
      if (const_arg1 == const0_rtx && !const_arg0)
	{
	  rtx y = lookup_as_function (folded_arg0, IOR);
	  rtx inner_const;

	  if (y != 0
	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
	      && CONST_INT_P (inner_const)
	      && INTVAL (inner_const) != 0)
	    folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
	}

      {
	rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
	rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
	new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
      }
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      switch (code)
	{
	case PLUS:
	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
	     with that LABEL_REF as its second operand.  If so, the result is
	     the first operand of that MINUS.  This handles switches with an
	     ADDR_DIFF_VEC table.  */
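	  /* For example, a dispatch-table entry may produce
	     (plus (minus (reg Z) (label_ref L1)) (label_ref L1)),
	     which folds back to (reg Z).  The register and label
	     names here are illustrative only.  */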
	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
	    {
	      rtx y
		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
		: lookup_as_function (folded_arg0, MINUS);

	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
		return XEXP (y, 0);

	      /* Now try for a CONST of a MINUS like the above.  */
	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
			: lookup_as_function (folded_arg0, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* Likewise if the operands are in the other order.  */
	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
	    {
	      rtx y
		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
		: lookup_as_function (folded_arg1, MINUS);

	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
		return XEXP (y, 0);

	      /* Now try for a CONST of a MINUS like the above.  */
	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
			: lookup_as_function (folded_arg1, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* If second operand is a register equivalent to a negative
	     CONST_INT, see if we can find a register equivalent to the
	     positive constant.  Make a MINUS if so.  Don't do this for
	     a non-negative constant since we might then alternate between
	     choosing positive and negative constants.  Having the positive
	     constant previously-used is the more common case.  Be sure
	     the resulting constant is non-negative; if const_arg1 were
	     the smallest negative number this would overflow: depending
	     on the mode, this would either just be the same value (and
	     hence not save anything) or be incorrect.  */
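	  /* E.g. if the second operand is known to hold -10 and some
	     register R already holds 10, rewrite (plus X Y) as
	     (minus X R) rather than materializing -10.  The values
	     here are illustrative only.  */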
	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
	      && INTVAL (const_arg1) < 0
	      /* This used to test

	         -INTVAL (const_arg1) >= 0

		 But the Sun V5.0 compilers mis-compiled that test.  So
		 instead we test for the problematic value in a more direct
		 manner and hope the Sun compilers get it correct.  */
	      && INTVAL (const_arg1) !=
	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
	      && REG_P (folded_arg1))
	    {
	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
	      struct table_elt *p
		= lookup (new_const, SAFE_HASH (new_const, mode), mode);

	      if (p)
		for (p = p->first_same_value; p; p = p->next_same_value)
		  if (REG_P (p->exp))
		    return simplify_gen_binary (MINUS, mode, folded_arg0,
						canon_reg (p->exp, NULL_RTX));
	    }
	  goto from_plus;

	case MINUS:
	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
	     If so, produce (PLUS Z C2-C).  */
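	  /* For example, if Y is known to be (plus Z (const_int 7))
	     and C is 3, then (minus Y 3) folds to (plus Z (const_int 4)).  */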
	  if (const_arg1 != 0 && CONST_INT_P (const_arg1))
	    {
	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
	      if (y && CONST_INT_P (XEXP (y, 1)))
		return fold_rtx (plus_constant (copy_rtx (y),
						-INTVAL (const_arg1)),
				 NULL_RTX);
	    }

	  /* Fall through.  */

	from_plus:
	case SMIN:    case SMAX:      case UMIN:    case UMAX:
	case IOR:     case AND:       case XOR:
	case MULT:
	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
	     is known to be of similar form, we may be able to replace the
	     operation with a combined operation.  This may eliminate the
	     intermediate operation if every use is simplified in this way.
	     Note that the similar optimization done by combine.c only works
	     if the intermediate operation's result has only one reference.  */
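	  /* E.g. if REG is known to be (ashift:SI X (const_int 2)),
	     then (ashift:SI REG (const_int 3)) can be replaced by
	     (ashift:SI X (const_int 5)); likewise two consecutive
	     ANDs or PLUSes with constants collapse into one.  */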

	  if (REG_P (folded_arg0)
	      && const_arg1 && CONST_INT_P (const_arg1))
	    {
	      int is_shift
		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
	      rtx y, inner_const, new_const;
	      rtx canon_const_arg1 = const_arg1;
	      enum rtx_code associate_code;

	      if (is_shift
		  && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
		      || INTVAL (const_arg1) < 0))
		{
		  if (SHIFT_COUNT_TRUNCATED)
		    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
						& (GET_MODE_BITSIZE (mode)
						   - 1));
		  else
		    break;
		}

	      y = lookup_as_function (folded_arg0, code);
	      if (y == 0)
		break;

	      /* If we have compiled a statement like
		 "if (x == (x & mask1))", and now are looking at
		 "x & mask2", we will have a case where the first operand
		 of Y is the same as our first operand.  Unless we detect
		 this case, an infinite loop will result.  */
	      if (XEXP (y, 0) == folded_arg0)
		break;

	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
	      if (!inner_const || !CONST_INT_P (inner_const))
		break;

	      /* Don't associate these operations if they are a PLUS with the
		 same constant and it is a power of two.  These might be doable
		 with a pre- or post-increment.  Similarly for two subtracts of
		 identical powers of two with post decrement.  */

	      if (code == PLUS && const_arg1 == inner_const
		  && ((HAVE_PRE_INCREMENT
			  && exact_log2 (INTVAL (const_arg1)) >= 0)
		      || (HAVE_POST_INCREMENT
			  && exact_log2 (INTVAL (const_arg1)) >= 0)
		      || (HAVE_PRE_DECREMENT
			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
		      || (HAVE_POST_DECREMENT
			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
		break;

	      /* ??? Vector mode shifts by scalar
		 shift operand are not supported yet.  */
	      if (is_shift && VECTOR_MODE_P (mode))
		break;

	      if (is_shift
		  && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
		      || INTVAL (inner_const) < 0))
		{
		  if (SHIFT_COUNT_TRUNCATED)
		    inner_const = GEN_INT (INTVAL (inner_const)
					   & (GET_MODE_BITSIZE (mode) - 1));
		  else
		    break;
		}

	      /* Compute the code used to compose the constants.  For example,
		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

	      associate_code = (is_shift || code == MINUS ? PLUS : code);

	      new_const = simplify_binary_operation (associate_code, mode,
						     canon_const_arg1,
						     inner_const);

	      if (new_const == 0)
		break;

	      /* If we are associating shift operations, don't let this
		 produce a shift of the size of the object or larger.
		 This could occur when we follow a sign-extend by a right
		 shift on a machine that does a sign-extend as a pair
		 of shifts.  */
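	      /* E.g. combining (ashiftrt (ashiftrt X 20) 20) in SImode
		 gives a count of 40; an arithmetic right shift by 31
		 produces the same all-sign-bits result, so the code
		 below clamps to GET_MODE_BITSIZE (mode) - 1.  */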

	      if (is_shift
		  && CONST_INT_P (new_const)
		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
		{
		  /* As an exception, we can turn an ASHIFTRT of this
		     form into a shift of the number of bits - 1.  */
		  if (code == ASHIFTRT)
		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
		  else if (!side_effects_p (XEXP (y, 0)))
		    return CONST0_RTX (mode);
		  else
		    break;
		}

	      y = copy_rtx (XEXP (y, 0));

	      /* If Y contains our first operand (the most common way this
		 can happen is if Y is a MEM), we would go into an infinite
		 loop if we tried to fold it.  So don't in that case.  */

	      if (! reg_mentioned_p (folded_arg0, y))
		y = fold_rtx (y, insn);

	      return simplify_gen_binary (code, mode, y, new_const);
	    }
	  break;

	case DIV:       case UDIV:
	  /* ??? The associative optimization performed immediately above is
	     also possible for DIV and UDIV using associate_code of MULT.
	     However, we would need extra code to verify that the
	     multiplication does not overflow, that is, there is no overflow
	     in the calculation of new_const.  */
	  break;

	default:
	  break;
	}

      new_rtx = simplify_binary_operation (code, mode,
				       const_arg0 ? const_arg0 : folded_arg0,
				       const_arg1 ? const_arg1 : folded_arg1);
      break;

    case RTX_OBJ:
      /* (lo_sum (high X) X) is simply X.  */
      if (code == LO_SUM && const_arg0 != 0
	  && GET_CODE (const_arg0) == HIGH
	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
	return const_arg1;
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
					const_arg0 ? const_arg0 : folded_arg0,
					const_arg1 ? const_arg1 : folded_arg1,
					const_arg2 ? const_arg2 : XEXP (x, 2));
      break;

    default:
      break;
    }

  return new_rtx ? new_rtx : x;
}

/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      enum machine_mode mode = GET_MODE (x);
      enum machine_mode imode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
        return new_rtx;

      /* If we didn't and if doing so makes sense, see if we previously
	 assigned a constant value to the enclosing word mode SUBREG.  */
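      /* E.g. for (subreg:QI (reg:DI 80) 4) with a 32-bit word_mode on a
	 little-endian target, this checks whether (subreg:SI (reg:DI 80) 4)
	 has a known constant value and, if so, takes its low QImode part.
	 The register number is illustrative only.  */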
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
	  && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
	{
	  int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
	  if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
	    {
	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
	      new_rtx = lookup_as_function (y, CONST_INT);
	      if (new_rtx)
		return gen_lowpart (mode, new_rtx);
	    }
	}

      /* Otherwise see if we already have a constant for the inner REG.  */
      if (REG_P (SUBREG_REG (x))
	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
        return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
	return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
	return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
	if (elt->is_const && CONSTANT_P (elt->exp))
	  return elt->exp;
    }

  return 0;
}

/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
	if (i == 2)
   we can add the fact that `i' and `2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  enum machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
	return;
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}

/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */
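/* E.g. a (const_int 3), which is naturally modeless (VOIDmode), comes back
   unchanged, while (reg:SI 10) requested in HImode becomes
   (subreg:HI (reg:SI 10) 0) on a little-endian target.  The operands are
   illustrative only.  */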

static rtx
record_jump_cond_subreg (enum machine_mode mode, rtx op)
{
  enum machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}

/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
		  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */
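  /* For example, if (subreg:SI (reg:QI 65) 0) is known equal to
     (reg:SI 66), then (reg:QI 65) is also equal to the QImode low part
     of (reg:SI 66).  Register numbers are illustrative only.  */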

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op0))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
			  reversed_nonequality);
    }

  if (code == EQ && GET_CODE (op1) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op1))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
			  reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
			  reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
			  reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.  Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register nor a constant, we can't
	 do anything.  */

      if (!REG_P (op1))
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || !REG_P (op0) || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL, 0))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1, mode);
	    }

	  op0_elt = insert (op0, NULL, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	}

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	    }

	  ent->comparison_const = NULL_RTX;
	  ent->comparison_qty = REG_QTY (REGNO (op1));
	}
      else
	{
	  ent->comparison_const = op1;
	  ent->comparison_qty = -1;
	}

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}

/* CSE processing for one instruction.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */

/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};

static void
cse_insn (rtx insn)
{
  rtx x = PATTERN (insn);
  int i;
  rtx tem;
  int n_sets = 0;

  rtx src_eqv = 0;
  struct table_elt *src_eqv_elt = 0;
  int src_eqv_volatile = 0;
  int src_eqv_in_memory = 0;
  unsigned src_eqv_hash = 0;

  struct set *sets = (struct set *) 0;

  this_insn = insn;
#ifdef HAVE_cc0
  /* Records what this insn does to set CC0.  */
  this_insn_cc0 = 0;
  this_insn_cc0_mode = VOIDmode;
#endif

  /* Find all the SETs and CLOBBERs in this instruction.
     Record all the SETs in the array `set' and count them.
     Also determine whether there is a CLOBBER that invalidates
     all memory references, or all references at varying addresses.  */

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	{
	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
	}
    }

  if (GET_CODE (x) == SET)
    {
      sets = XALLOCA (struct set);
      sets[0].rtl = x;

      /* Ignore SETs that are unconditional jumps.
	 They never need cse processing, so this does not hurt.
	 The reason is not efficiency but rather
	 so that we can test at the end for instructions
	 that have been simplified to unconditional jumps
	 and not be misled by unchanged instructions
	 that were unconditional jumps to begin with.  */
      if (SET_DEST (x) == pc_rtx
	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
	;

      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
	 The hard function value register is used only once, to copy to
	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
	 Ensure we invalidate the destination register.  On the 80386 no
	 other code would invalidate it since it is a fixed_reg.
	 We need not check the return of apply_change_group; see canon_reg.  */

      else if (GET_CODE (SET_SRC (x)) == CALL)
	{
	  canon_reg (SET_SRC (x), insn);
	  apply_change_group ();
	  fold_rtx (SET_SRC (x), insn);
	  invalidate (SET_DEST (x), VOIDmode);
	}
      else
	n_sets = 1;
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int lim = XVECLEN (x, 0);

      sets = XALLOCAVEC (struct set, lim);

      /* Find all regs explicitly clobbered in this insn,
	 and ensure they are not replaced with any other regs
	 elsewhere in this insn.
	 When a reg that is clobbered is also used for input,
	 we should presume that that is for a reason,
	 and we should not substitute some other register
	 which is not supposed to be clobbered.
	 Therefore, this loop cannot be merged into the one below
	 because a CALL may precede a CLOBBER and refer to the
	 value clobbered.  We must not let a canonicalization do
	 anything in that case.  */
      for (i = 0; i < lim; i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx clobbered = XEXP (y, 0);

	      if (REG_P (clobbered)
		  || GET_CODE (clobbered) == SUBREG)
		invalidate (clobbered, VOIDmode);
	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
		       || GET_CODE (clobbered) == ZERO_EXTRACT)
		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
	    }
	}

      for (i = 0; i < lim; i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == SET)
	    {
	      /* As above, we ignore unconditional jumps and call-insns and
		 ignore the result of apply_change_group.  */
	      if (GET_CODE (SET_SRC (y)) == CALL)
		{
		  canon_reg (SET_SRC (y), insn);
		  apply_change_group ();
		  fold_rtx (SET_SRC (y), insn);
		  invalidate (SET_DEST (y), VOIDmode);
		}
	      else if (SET_DEST (y) == pc_rtx
		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
		;
	      else
		sets[n_sets++].rtl = y;
	    }
	  else if (GET_CODE (y) == CLOBBER)
	    {
	      /* If we clobber memory, canon the address.
		 This does nothing when a register is clobbered
		 because we have already invalidated the reg.  */
	      if (MEM_P (XEXP (y, 0)))
		canon_reg (XEXP (y, 0), insn);
	    }
	  else if (GET_CODE (y) == USE
		   && ! (REG_P (XEXP (y, 0))
			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
	    canon_reg (y, insn);
	  else if (GET_CODE (y) == CALL)
	    {
	      /* The result of apply_change_group can be ignored; see
		 canon_reg.  */
	      canon_reg (y, insn);
	      apply_change_group ();
	      fold_rtx (y, insn);
	    }
	}
    }
  else if (GET_CODE (x) == CLOBBER)
    {
      if (MEM_P (XEXP (x, 0)))
	canon_reg (XEXP (x, 0), insn);
    }

  /* Canonicalize a USE of a pseudo register or memory location.  */
  else if (GET_CODE (x) == USE
	   && ! (REG_P (XEXP (x, 0))
		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    canon_reg (XEXP (x, 0), insn);
  else if (GET_CODE (x) == CALL)
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (x, insn);
      apply_change_group ();
      fold_rtx (x, insn);
    }
  else if (DEBUG_INSN_P (insn))
    canon_reg (PATTERN (insn), insn);

  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
     is handled specially for this case, and if it isn't set, then there will
     be no equivalence for the destination.  */
  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (XEXP (tem, 0), insn);
      apply_change_group ();
      src_eqv = fold_rtx (XEXP (tem, 0), insn);
      XEXP (tem, 0) = copy_rtx (src_eqv);
      df_notes_rescan (insn);
    }

  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      if (GET_CODE (dest) == ZERO_EXTRACT)
	{
	  validate_change (insn, &XEXP (dest, 1),
			   canon_reg (XEXP (dest, 1), insn), 1);
	  validate_change (insn, &XEXP (dest, 2),
			   canon_reg (XEXP (dest, 2), insn), 1);
	}

      while (GET_CODE (dest) == SUBREG
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      if (MEM_P (dest))
	canon_reg (dest, insn);
    }

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     occur often.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();

  /* Set sets[i].src_elt to the class each source belongs to.
     Detect assignments from or to volatile things
     and set set[i] to zero so they will be ignored
     in the rest of this function.

     Nothing in this loop changes the hash table or the register chains.  */

  for (i = 0; i < n_sets; i++)
    {
      bool repeat = false;
      rtx src, dest;
      rtx src_folded;
      struct table_elt *elt = 0, *p;
      enum machine_mode mode;
      rtx src_eqv_here;
      rtx src_const = 0;
      rtx src_related = 0;
      bool src_related_is_const_anchor = false;
      struct table_elt *src_const_elt = 0;
      int src_cost = MAX_COST;
      int src_eqv_cost = MAX_COST;
      int src_folded_cost = MAX_COST;
      int src_related_cost = MAX_COST;
      int src_elt_cost = MAX_COST;
      int src_regcost = MAX_COST;
      int src_eqv_regcost = MAX_COST;
      int src_folded_regcost = MAX_COST;
      int src_related_regcost = MAX_COST;
      int src_elt_regcost = MAX_COST;
      /* Set nonzero if we need to call force_const_mem on the
	 contents of src_folded before using it.  */
      int src_folded_force_flag = 0;

      dest = SET_DEST (sets[i].rtl);
      src = SET_SRC (sets[i].rtl);

      /* If SRC is a constant that has no machine mode,
	 hash it with the destination's machine mode.
	 This way we can keep different modes separate.  */
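      /* E.g. (const_int 5) has VOIDmode; when it is stored into an SImode
	 register it is hashed as an SImode value, so the same constant
	 stored into a DImode register lands in a different class.  */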

      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
      sets[i].mode = mode;

      if (src_eqv)
	{
	  enum machine_mode eqvmode = mode;
	  if (GET_CODE (dest) == STRICT_LOW_PART)
	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
	  do_not_record = 0;
	  hash_arg_in_memory = 0;
	  src_eqv_hash = HASH (src_eqv, eqvmode);

	  /* Find the equivalence class for the equivalent expression.  */

	  if (!do_not_record)
	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);

	  src_eqv_volatile = do_not_record;
	  src_eqv_in_memory = hash_arg_in_memory;
	}

      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
	 value of the INNER register, not the destination.  So it is not
	 a valid substitution for the source.  But save it for later.  */
      if (GET_CODE (dest) == STRICT_LOW_PART)
	src_eqv_here = 0;
      else
	src_eqv_here = src_eqv;

      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
	 simplified result, which may not necessarily be valid.  */
      src_folded = fold_rtx (src, insn);

#if 0
      /* ??? This caused bad code to be generated for the m68k port with -O2.
	 Suppose src is (CONST_INT -1), and that after truncation src_folded
	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
	 At the end we will add src and src_const to the same equivalence
	 class.  We now have 3 and -1 on the same equivalence class.  This
	 causes later instructions to be mis-optimized.  */
      /* If storing a constant in a bitfield, pre-truncate the constant
	 so we will be able to record it later.  */
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
	{
	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

	  if (CONST_INT_P (src)
	      && CONST_INT_P (width)
	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
	    src_folded
	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
					  << INTVAL (width)) - 1));
	}
#endif

      /* Compute SRC's hash code, and also notice if it
	 should not be recorded at all.  In that case,
	 prevent any further processing of this assignment.  */
      do_not_record = 0;
      hash_arg_in_memory = 0;

      sets[i].src = src;
      sets[i].src_hash = HASH (src, mode);
      sets[i].src_volatile = do_not_record;
      sets[i].src_in_memory = hash_arg_in_memory;

      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
	 a pseudo, do not record SRC.  Using SRC as a replacement for
	 anything else will be incorrect in that situation.  Note that
	 this usually occurs only for stack slots, in which case all the
	 RTL would be referring to SRC, so we don't lose any optimization
	 opportunities by not having SRC in the hash table.  */

      if (MEM_P (src)
	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
	  && REG_P (dest)
	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
	sets[i].src_volatile = 1;

#if 0
      /* It is no longer clear why we used to do this, but it doesn't
	 appear to still be needed.  So let's try without it since this
	 code hurts cse'ing widened ops.  */
      /* If source is a paradoxical subreg (such as QI treated as an SI),
	 treat it as volatile.  It may do the work of an SI in one context
	 where the extra bits are not being used, but cannot replace an SI
	 in general.  */
      if (GET_CODE (src) == SUBREG
	  && (GET_MODE_SIZE (GET_MODE (src))
	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
	sets[i].src_volatile = 1;
#endif

      /* Locate all possible equivalent forms for SRC.  Try to replace
         SRC in the insn with each cheaper equivalent.

         We have the following types of equivalents: SRC itself, a folded
         version, a value given in a REG_EQUAL note, or a value related
	 to a constant.

         Each of these equivalents may be part of an additional class
         of equivalents (if more than one is in the table, they must be in
         the same class; we check for this).

	 If the source is volatile, we don't do any table lookups.

         We note any constant equivalent for possible later use in a
         REG_NOTE.  */

      if (!sets[i].src_volatile)
	elt = lookup (src, sets[i].src_hash, mode);

      sets[i].src_elt = elt;

      if (elt && src_eqv_here && src_eqv_elt)
	{
	  if (elt->first_same_value != src_eqv_elt->first_same_value)
	    {
	      /* The REG_EQUAL is indicating that two formerly distinct
		 classes are now equivalent.  So merge them.  */
	      merge_equiv_classes (elt, src_eqv_elt);
	      src_eqv_hash = HASH (src_eqv, elt->mode);
	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
	    }

	  src_eqv_here = 0;
	}

      else if (src_eqv_elt)
	elt = src_eqv_elt;

      /* Try to find a constant somewhere and record it in `src_const'.
	 Record its table element, if any, in `src_const_elt'.  Look in
	 any known equivalences first.  (If the constant is not in the
	 table, also set `sets[i].src_const_hash').  */
      if (elt)
	for (p = elt->first_same_value; p; p = p->next_same_value)
	  if (p->is_const)
	    {
	      src_const = p->exp;
	      src_const_elt = elt;
	      break;
	    }

      if (src_const == 0
	  && (CONSTANT_P (src_folded)
	      /* Consider (minus (label_ref L1) (label_ref L2)) as
		 "constant" here so we will record it.  This allows us
		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
	      || (GET_CODE (src_folded) == MINUS
		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
	src_const = src_folded, src_const_elt = elt;
      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
	src_const = src_eqv_here, src_const_elt = src_eqv_elt;

      /* If we don't know if the constant is in the table, get its
	 hash code and look it up.  */
      if (src_const && src_const_elt == 0)
	{
	  sets[i].src_const_hash = HASH (src_const, mode);
	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
	}

      sets[i].src_const = src_const;
      sets[i].src_const_elt = src_const_elt;

      /* If the constant and our source are both in the table, mark them as
	 equivalent.  Otherwise, if a constant is in the table but the source
	 isn't, set ELT to it.  */
      if (src_const_elt && elt
	  && src_const_elt->first_same_value != elt->first_same_value)
	merge_equiv_classes (elt, src_const_elt);
      else if (src_const_elt && elt == 0)
	elt = src_const_elt;

      /* See if there is a register linearly related to a constant
         equivalent of SRC.  */
      if (src_const
	  && (GET_CODE (src_const) == CONST
	      || (src_const_elt && src_const_elt->related_value != 0)))
	{
	  src_related = use_related_value (src_const, src_const_elt);
	  if (src_related)
	    {
	      struct table_elt *src_related_elt
		= lookup (src_related, HASH (src_related, mode), mode);
	      if (src_related_elt && elt)
		{
		  if (elt->first_same_value
		      != src_related_elt->first_same_value)
		    /* This can occur when we previously saw a CONST
		       involving a SYMBOL_REF and then see the SYMBOL_REF
		       twice.  Merge the involved classes.  */
		    merge_equiv_classes (elt, src_related_elt);

		  src_related = 0;
		  src_related_elt = 0;
		}
	      else if (src_related_elt && elt == 0)
		elt = src_related_elt;
	    }
	}

      /* See if we have a CONST_INT that is already in a register in a
	 wider mode.  */

      if (src_const && src_related == 0 && CONST_INT_P (src_const)
	  && GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
	{
	  enum machine_mode wider_mode;

	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
	       wider_mode != VOIDmode
	       && GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
	       && src_related == 0;
	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
	    {
	      struct table_elt *const_elt
		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);

	      if (const_elt == 0)
		continue;

	      for (const_elt = const_elt->first_same_value;
		   const_elt; const_elt = const_elt->next_same_value)
		if (REG_P (const_elt->exp))
		  {
		    src_related = gen_lowpart (mode, const_elt->exp);
		    break;
		  }
	    }
	}

      /* Another possibility is that we have an AND with a constant in
	 a mode narrower than a word.  If so, it might have been generated
	 as part of an "if" which would narrow the AND.  If we already
	 have done the AND in a wider mode, we can use a SUBREG of that
	 value.  */
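      /* E.g. if SRC is (and:QI X (const_int 15)) and the same AND has
	 already been computed in SImode into some register R, the QImode
	 result can simply use the low part of R.  The operands here are
	 illustrative only.  */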

      if (flag_expensive_optimizations && ! src_related
	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
	{
	  enum machine_mode tmode;
	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));

	  for (tmode = GET_MODE_WIDER_MODE (mode);
	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
	       tmode = GET_MODE_WIDER_MODE (tmode))
	    {
	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
	      struct table_elt *larger_elt;

	      if (inner)
		{
		  PUT_MODE (new_and, tmode);
		  XEXP (new_and, 0) = inner;
		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
		  if (larger_elt == 0)
		    continue;

		  for (larger_elt = larger_elt->first_same_value;
		       larger_elt; larger_elt = larger_elt->next_same_value)
		    if (REG_P (larger_elt->exp))
		      {
			src_related
			  = gen_lowpart (mode, larger_elt->exp);
			break;
		      }

		  if (src_related)
		    break;
		}
	    }
	}

#ifdef LOAD_EXTEND_OP
      /* See if a MEM has already been loaded with a widening operation;
	 if it has, we can use a subreg of that.  Many CISC machines
	 also have such operations, but this is only likely to be
	 beneficial on these machines.  */
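      /* E.g. on a target where QImode loads are always zero-extended to
	 word mode, a previously recorded (zero_extend:SI (mem:QI A)) held
	 in register R lets this (mem:QI A) use the low part of R.  */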

      if (flag_expensive_optimizations && src_related == 0
	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
	  && GET_MODE_CLASS (mode) == MODE_INT
	  && MEM_P (src) && ! do_not_record
	  && LOAD_EXTEND_OP (mode) != UNKNOWN)
	{
	  struct rtx_def memory_extend_buf;
	  rtx memory_extend_rtx = &memory_extend_buf;
	  enum machine_mode tmode;

	  /* Set what we are trying to extend and the operation it might
	     have been extended with.  */
	  memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
	  XEXP (memory_extend_rtx, 0) = src;

	  for (tmode = GET_MODE_WIDER_MODE (mode);
	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
	       tmode = GET_MODE_WIDER_MODE (tmode))
	    {
	      struct table_elt *larger_elt;

	      PUT_MODE (memory_extend_rtx, tmode);
	      larger_elt = lookup (memory_extend_rtx,
				   HASH (memory_extend_rtx, tmode), tmode);
	      if (larger_elt == 0)
		continue;

	      for (larger_elt = larger_elt->first_same_value;
		   larger_elt; larger_elt = larger_elt->next_same_value)
		if (REG_P (larger_elt->exp))
		  {
		    src_related = gen_lowpart (mode, larger_elt->exp);
		    break;
		  }

	      if (src_related)
		break;
	    }
	}
#endif /* LOAD_EXTEND_OP */

      /* Try to express the constant using a register+offset expression
	 derived from a constant anchor.  */
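      /* E.g. if some register is known to hold the anchor value 0x3000
	 and SRC_CONST is 0x3004, the constant can be rewritten as that
	 register plus 4.  The anchor spacing is target-defined via
	 targetm.const_anchor; the values here are illustrative only.  */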

      if (targetm.const_anchor
	  && !src_related
	  && src_const
	  && GET_CODE (src_const) == CONST_INT)
	{
	  src_related = try_const_anchors (src_const, mode);
	  src_related_is_const_anchor = src_related != NULL_RTX;
	}


      if (src == src_folded)
	src_folded = 0;

      /* At this point, ELT, if nonzero, points to a class of expressions
         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
	 and SRC_RELATED, if nonzero, each contain additional equivalent
	 expressions.  Prune these latter expressions by deleting expressions
	 already in the equivalence class.

	 Check for an equivalent identical to the destination.  If found,
	 this is the preferred equivalent since it will likely lead to
	 elimination of the insn.  Indicate this by placing it in
	 `src_related'.  */

      if (elt)
	elt = elt->first_same_value;
      for (p = elt; p; p = p->next_same_value)
	{
	  enum rtx_code code = GET_CODE (p->exp);

	  /* If the expression is not valid, ignore it.  Then we do not
	     have to check for validity below.  In most cases, we can use
	     `rtx_equal_p', since canonicalization has already been done.  */
	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  /* Also skip paradoxical subregs, unless that's what we're
	     looking for.  */
	  if (code == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (p->exp))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
	      && ! (src != 0
		    && GET_CODE (src) == SUBREG
		    && GET_MODE (src) == GET_MODE (p->exp)
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
	    continue;

	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
	    src = 0;
	  else if (src_folded && GET_CODE (src_folded) == code
		   && rtx_equal_p (src_folded, p->exp))
	    src_folded = 0;
	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
		   && rtx_equal_p (src_eqv_here, p->exp))
	    src_eqv_here = 0;
	  else if (src_related && GET_CODE (src_related) == code
		   && rtx_equal_p (src_related, p->exp))
	    src_related = 0;

	  /* If this is the same as the destination of the insn, we want
	     to prefer it.  Copy it to src_related.  The code below will
	     then give it a negative cost.  */
	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
	    src_related = dest;
	}

      /* Find the cheapest valid equivalent, trying all the available
         possibilities.  Prefer items not in the hash table to ones
         that are when they are equal cost.  Note that we can never
         worsen an insn as the current contents will also succeed.
	 If we find an equivalent identical to the destination, use it as best,
	 since this insn will probably be eliminated in that case.  */
      if (src)
	{
	  if (rtx_equal_p (src, dest))
	    src_cost = src_regcost = -1;
	  else
	    {
	      src_cost = COST (src);
	      src_regcost = approx_reg_cost (src);
	    }
	}

      if (src_eqv_here)
	{
	  if (rtx_equal_p (src_eqv_here, dest))
	    src_eqv_cost = src_eqv_regcost = -1;
	  else
	    {
	      src_eqv_cost = COST (src_eqv_here);
	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
	    }
	}

      if (src_folded)
	{
	  if (rtx_equal_p (src_folded, dest))
	    src_folded_cost = src_folded_regcost = -1;
	  else
	    {
	      src_folded_cost = COST (src_folded);
	      src_folded_regcost = approx_reg_cost (src_folded);
	    }
	}

      if (src_related)
	{
	  if (rtx_equal_p (src_related, dest))
	    src_related_cost = src_related_regcost = -1;
	  else
	    {
	      src_related_cost = COST (src_related);
	      src_related_regcost = approx_reg_cost (src_related);

	      /* If a const-anchor is used to synthesize a constant that
		 normally requires multiple instructions then slightly prefer
		 it over the original sequence.  These instructions are likely
		 to become redundant now.  We can't compare against the cost
		 of src_eqv_here because, on MIPS for example, multi-insn
		 constants have zero cost; they are assumed to be hoisted from
		 loops.  */
	      if (src_related_is_const_anchor
		  && src_related_cost == src_cost
		  && src_eqv_here)
		src_related_cost--;
	    }
	}

      /* If this was an indirect jump insn, a known label will really be
	 cheaper even though it looks more expensive.  */
      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
4933
4934      /* Terminate loop when replacement made.  This must terminate since
4935         the current contents will be tested and will always be valid.  */
4936      while (1)
4937	{
4938	  rtx trial;
4939
4940	  /* Skip invalid entries.  */
4941	  while (elt && !REG_P (elt->exp)
4942		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
4943	    elt = elt->next_same_value;
4944
4945	  /* A paradoxical subreg would be bad here: it'll be the right
4946	     size, but later may be adjusted so that the upper bits aren't
4947	     what we want.  So reject it.  */
4948	  if (elt != 0
4949	      && GET_CODE (elt->exp) == SUBREG
4950	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
4951		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
4952	      /* It is okay, though, if the rtx we're trying to match
4953		 will ignore any of the bits we can't predict.  */
4954	      && ! (src != 0
4955		    && GET_CODE (src) == SUBREG
4956		    && GET_MODE (src) == GET_MODE (elt->exp)
4957		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4958			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
4959	    {
4960	      elt = elt->next_same_value;
4961	      continue;
4962	    }
4963
4964	  if (elt)
4965	    {
4966	      src_elt_cost = elt->cost;
4967	      src_elt_regcost = elt->regcost;
4968	    }
4969
4970	  /* Find cheapest and skip it for the next time.   For items
4971	     of equal cost, use this order:
4972	     src_folded, src, src_eqv, src_related and hash table entry.  */
4973	  if (src_folded
4974	      && preferable (src_folded_cost, src_folded_regcost,
4975			     src_cost, src_regcost) <= 0
4976	      && preferable (src_folded_cost, src_folded_regcost,
4977			     src_eqv_cost, src_eqv_regcost) <= 0
4978	      && preferable (src_folded_cost, src_folded_regcost,
4979			     src_related_cost, src_related_regcost) <= 0
4980	      && preferable (src_folded_cost, src_folded_regcost,
4981			     src_elt_cost, src_elt_regcost) <= 0)
4982	    {
4983	      trial = src_folded, src_folded_cost = MAX_COST;
4984	      if (src_folded_force_flag)
4985		{
4986		  rtx forced = force_const_mem (mode, trial);
4987		  if (forced)
4988		    trial = forced;
4989		}
4990	    }
4991	  else if (src
4992		   && preferable (src_cost, src_regcost,
4993				  src_eqv_cost, src_eqv_regcost) <= 0
4994		   && preferable (src_cost, src_regcost,
4995				  src_related_cost, src_related_regcost) <= 0
4996		   && preferable (src_cost, src_regcost,
4997				  src_elt_cost, src_elt_regcost) <= 0)
4998	    trial = src, src_cost = MAX_COST;
4999	  else if (src_eqv_here
5000		   && preferable (src_eqv_cost, src_eqv_regcost,
5001				  src_related_cost, src_related_regcost) <= 0
5002		   && preferable (src_eqv_cost, src_eqv_regcost,
5003				  src_elt_cost, src_elt_regcost) <= 0)
5004	    trial = src_eqv_here, src_eqv_cost = MAX_COST;
5005	  else if (src_related
5006		   && preferable (src_related_cost, src_related_regcost,
5007				  src_elt_cost, src_elt_regcost) <= 0)
5008	    trial = src_related, src_related_cost = MAX_COST;
5009	  else
5010	    {
5011	      trial = elt->exp;
5012	      elt = elt->next_same_value;
5013	      src_elt_cost = MAX_COST;
5014	    }
5015
5016	  /* Avoid creation of overlapping memory moves.  */
5017	  if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
5018	    {
5019	      rtx src, dest;
5020
5021	      /* BLKmode moves are not handled by cse anyway.  */
5022	      if (GET_MODE (trial) == BLKmode)
5023		break;
5024
5025	      src = canon_rtx (trial);
5026	      dest = canon_rtx (SET_DEST (sets[i].rtl));
5027
5028	      if (!MEM_P (src) || !MEM_P (dest)
5029		  || !nonoverlapping_memrefs_p (src, dest))
5030		break;
5031	    }
5032
5033	  /* Try to optimize
5034	     (set (reg:M N) (const_int A))
5035	     (set (reg:M2 O) (const_int B))
5036	     (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5037		  (reg:M2 O)).  */
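	  /* A worked example with hypothetical values: if (reg:SI N) is
	     known to contain 0x1234, TRIAL is (const_int 0x56), the width
	     C is 8 and the position D is 8 on a !BITS_BIG_ENDIAN target,
	     then shift = 8, mask = 0xff and the code below computes
	     val = (0x1234 & ~0xff00) | ((0x56 & 0xff) << 8) = 0x5634,
	     rewriting the insn as (set (reg:SI N) (const_int 0x5634)).  */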
5038	  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5039	      && CONST_INT_P (trial)
5040	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5041	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5042	      && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5043	      && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl)))
5044		  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5045	      && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5046		  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5047		  <= HOST_BITS_PER_WIDE_INT))
5048	    {
5049	      rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5050	      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5051	      rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5052	      unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5053	      struct table_elt *dest_elt
5054		= lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5055	      rtx dest_cst = NULL;
5056
5057	      if (dest_elt)
5058		for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5059		  if (p->is_const && CONST_INT_P (p->exp))
5060		    {
5061		      dest_cst = p->exp;
5062		      break;
5063		    }
5064	      if (dest_cst)
5065		{
5066		  HOST_WIDE_INT val = INTVAL (dest_cst);
5067		  HOST_WIDE_INT mask;
5068		  unsigned int shift;
5069		  if (BITS_BIG_ENDIAN)
5070		    shift = GET_MODE_BITSIZE (GET_MODE (dest_reg))
5071			    - INTVAL (pos) - INTVAL (width);
5072		  else
5073		    shift = INTVAL (pos);
5074		  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5075		    mask = ~(HOST_WIDE_INT) 0;
5076		  else
5077		    mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5078		  val &= ~(mask << shift);
5079		  val |= (INTVAL (trial) & mask) << shift;
5080		  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5081		  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5082					   dest_reg, 1);
5083		  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5084					   GEN_INT (val), 1);
5085		  if (apply_change_group ())
5086		    {
5087		      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5088		      if (note)
5089			{
5090			  remove_note (insn, note);
5091			  df_notes_rescan (insn);
5092			}
5093		      src_eqv = NULL_RTX;
5094		      src_eqv_elt = NULL;
5095		      src_eqv_volatile = 0;
5096		      src_eqv_in_memory = 0;
5097		      src_eqv_hash = 0;
5098		      repeat = true;
5099		      break;
5100		    }
5101		}
5102	    }
5103
5104	  /* We don't normally have an insn matching (set (pc) (pc)), so
5105	     check for this separately here.  We will delete such an
5106	     insn below.
5107
5108	     For other cases such as a table jump or conditional jump
5109	     where we know the ultimate target, go ahead and replace the
5110	     operand.  While that may not make a valid insn, we will
5111	     reemit the jump below (and also insert any necessary
5112	     barriers).  */
5113	  if (n_sets == 1 && dest == pc_rtx
5114	      && (trial == pc_rtx
5115		  || (GET_CODE (trial) == LABEL_REF
5116		      && ! condjump_p (insn))))
5117	    {
5118	      /* Don't substitute non-local labels, this confuses CFG.  */
5119	      if (GET_CODE (trial) == LABEL_REF
5120		  && LABEL_REF_NONLOCAL_P (trial))
5121		continue;
5122
5123	      SET_SRC (sets[i].rtl) = trial;
5124	      cse_jumps_altered = true;
5125	      break;
5126	    }
5127
5128	  /* Reject certain invalid forms of CONST that we create.  */
5129	  else if (CONSTANT_P (trial)
5130		   && GET_CODE (trial) == CONST
5131		   /* Reject cases that will cause decode_rtx_const to
5132		      die.  On the alpha when simplifying a switch, we
5133		      get (const (truncate (minus (label_ref)
5134		      (label_ref)))).  */
5135		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5136		       /* Likewise on IA-64, except without the
5137			  truncate.  */
5138		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5139			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5140			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5141	    /* Do nothing for this case.  */
5142	    ;
5143
5144	  /* Look for a substitution that makes a valid insn.  */
5145	  else if (validate_unshare_change
5146		     (insn, &SET_SRC (sets[i].rtl), trial, 0))
5147	    {
5148	      rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5149
5150	      /* The result of apply_change_group can be ignored; see
5151		 canon_reg.  */
5152
5153	      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5154	      apply_change_group ();
5155
5156	      break;
5157	    }
5158
	  /* If we previously found constant pool entries for
	     constants and this is a constant, try making a
	     pool entry.  Put it in src_folded unless we have already
	     done so, since that is where it likely came from.  */
5163
5164	  else if (constant_pool_entries_cost
5165		   && CONSTANT_P (trial)
5166		   && (src_folded == 0
5167		       || (!MEM_P (src_folded)
5168			   && ! src_folded_force_flag))
5169		   && GET_MODE_CLASS (mode) != MODE_CC
5170		   && mode != VOIDmode)
5171	    {
5172	      src_folded_force_flag = 1;
5173	      src_folded = trial;
5174	      src_folded_cost = constant_pool_entries_cost;
5175	      src_folded_regcost = constant_pool_entries_regcost;
5176	    }
5177	}
5178
5179      /* If we changed the insn too much, handle this set from scratch.  */
5180      if (repeat)
5181	{
5182	  i--;
5183	  continue;
5184	}
5185
5186      src = SET_SRC (sets[i].rtl);
5187
5188      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5189	 However, there is an important exception:  If both are registers
5190	 that are not the head of their equivalence class, replace SET_SRC
5191	 with the head of the class.  If we do not do this, we will have
5192	 both registers live over a portion of the basic block.  This way,
5193	 their lifetimes will likely abut instead of overlapping.  */
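      /* E.g., if this insn has become (set (reg 105) (reg 105)) and the
	 first register of reg 105's equivalence class is reg 100, rewrite
	 it as (set (reg 105) (reg 100)) so that reg 100 can die here
	 instead of staying live alongside reg 105.  (Register numbers
	 are hypothetical.)  */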
5194      if (REG_P (dest)
5195	  && REGNO_QTY_VALID_P (REGNO (dest)))
5196	{
5197	  int dest_q = REG_QTY (REGNO (dest));
5198	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5199
5200	  if (dest_ent->mode == GET_MODE (dest)
5201	      && dest_ent->first_reg != REGNO (dest)
5202	      && REG_P (src) && REGNO (src) == REGNO (dest)
5203	      /* Don't do this if the original insn had a hard reg as
5204		 SET_SRC or SET_DEST.  */
5205	      && (!REG_P (sets[i].src)
5206		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5207	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5208	    /* We can't call canon_reg here because it won't do anything if
5209	       SRC is a hard register.  */
5210	    {
5211	      int src_q = REG_QTY (REGNO (src));
5212	      struct qty_table_elem *src_ent = &qty_table[src_q];
5213	      int first = src_ent->first_reg;
5214	      rtx new_src
5215		= (first >= FIRST_PSEUDO_REGISTER
5216		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5217
	      /* We must use validate_change even for this, because this
		 might be a special no-op instruction, suitable only to
		 tag notes onto.  */
5221	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5222		{
5223		  src = new_src;
5224		  /* If we had a constant that is cheaper than what we are now
5225		     setting SRC to, use that constant.  We ignored it when we
5226		     thought we could make this into a no-op.  */
5227		  if (src_const && COST (src_const) < COST (src)
5228		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5229					  src_const, 0))
5230		    src = src_const;
5231		}
5232	    }
5233	}
5234
5235      /* If we made a change, recompute SRC values.  */
5236      if (src != sets[i].src)
5237	{
5238	  do_not_record = 0;
5239	  hash_arg_in_memory = 0;
5240	  sets[i].src = src;
5241	  sets[i].src_hash = HASH (src, mode);
5242	  sets[i].src_volatile = do_not_record;
5243	  sets[i].src_in_memory = hash_arg_in_memory;
5244	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5245	}
5246
5247      /* If this is a single SET, we are setting a register, and we have an
5248	 equivalent constant, we want to add a REG_NOTE.   We don't want
5249	 to write a REG_EQUAL note for a constant pseudo since verifying that
5250	 that pseudo hasn't been eliminated is a pain.  Such a note also
5251	 won't help anything.
5252
5253	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5254	 which can be created for a reference to a compile time computable
5255	 entry in a jump table.  */
5256
5257      if (n_sets == 1 && src_const && REG_P (dest)
5258	  && !REG_P (src_const)
5259	  && ! (GET_CODE (src_const) == CONST
5260		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5261		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5262		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5263	{
5264	  /* We only want a REG_EQUAL note if src_const != src.  */
5265	  if (! rtx_equal_p (src, src_const))
5266	    {
5267	      /* Make sure that the rtx is not shared.  */
5268	      src_const = copy_rtx (src_const);
5269
5270	      /* Record the actual constant value in a REG_EQUAL note,
5271		 making a new one if one does not already exist.  */
5272	      set_unique_reg_note (insn, REG_EQUAL, src_const);
5273	      df_notes_rescan (insn);
5274	    }
5275	}
5276
5277      /* Now deal with the destination.  */
5278      do_not_record = 0;
5279
5280      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5281      while (GET_CODE (dest) == SUBREG
5282	     || GET_CODE (dest) == ZERO_EXTRACT
5283	     || GET_CODE (dest) == STRICT_LOW_PART)
5284	dest = XEXP (dest, 0);
5285
5286      sets[i].inner_dest = dest;
5287
5288      if (MEM_P (dest))
5289	{
5290#ifdef PUSH_ROUNDING
5291	  /* Stack pushes invalidate the stack pointer.  */
5292	  rtx addr = XEXP (dest, 0);
5293	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5294	      && XEXP (addr, 0) == stack_pointer_rtx)
5295	    invalidate (stack_pointer_rtx, VOIDmode);
5296#endif
5297	  dest = fold_rtx (dest, insn);
5298	}
5299
5300      /* Compute the hash code of the destination now,
5301	 before the effects of this instruction are recorded,
5302	 since the register values used in the address computation
5303	 are those before this instruction.  */
5304      sets[i].dest_hash = HASH (dest, mode);
5305
5306      /* Don't enter a bit-field in the hash table
5307	 because the value in it after the store
5308	 may not equal what was stored, due to truncation.  */
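      /* E.g., storing (const_int 0x1ff) through an 8-bit ZERO_EXTRACT
	 leaves 0xff in the field, so recording 0x1ff as the field's
	 value would be wrong; the exception below therefore only fires
	 when every bit above the field width is zero.  */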
5309
5310      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5311	{
5312	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5313
5314	  if (src_const != 0 && CONST_INT_P (src_const)
5315	      && CONST_INT_P (width)
5316	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5317	      && ! (INTVAL (src_const)
5318		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5319	    /* Exception: if the value is constant,
5320	       and it won't be truncated, record it.  */
5321	    ;
5322	  else
5323	    {
5324	      /* This is chosen so that the destination will be invalidated
5325		 but no new value will be recorded.
5326		 We must invalidate because sometimes constant
5327		 values can be recorded for bitfields.  */
5328	      sets[i].src_elt = 0;
5329	      sets[i].src_volatile = 1;
5330	      src_eqv = 0;
5331	      src_eqv_elt = 0;
5332	    }
5333	}
5334
      /* If this is the only set in a JUMP_INSN and it is now a no-op, we
	 can delete the insn.  */
5337      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5338	{
5339	  /* One less use of the label this insn used to jump to.  */
5340	  delete_insn_and_edges (insn);
5341	  cse_jumps_altered = true;
5342	  /* No more processing for this set.  */
5343	  sets[i].rtl = 0;
5344	}
5345
5346      /* If this SET is now setting PC to a label, we know it used to
5347	 be a conditional or computed branch.  */
5348      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5349	       && !LABEL_REF_NONLOCAL_P (src))
5350	{
	  /* We reemit the jump in as many cases as possible just in
	     case the form of an unconditional jump is significantly
	     different from a computed jump or conditional jump.
5354
5355	     If this insn has multiple sets, then reemitting the
5356	     jump is nontrivial.  So instead we just force rerecognition
5357	     and hope for the best.  */
5358	  if (n_sets == 1)
5359	    {
5360	      rtx new_rtx, note;
5361
5362	      new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5363	      JUMP_LABEL (new_rtx) = XEXP (src, 0);
5364	      LABEL_NUSES (XEXP (src, 0))++;
5365
5366	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5367	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5368	      if (note)
5369		{
5370		  XEXP (note, 1) = NULL_RTX;
5371		  REG_NOTES (new_rtx) = note;
5372		}
5373
5374	      delete_insn_and_edges (insn);
5375	      insn = new_rtx;
5376	    }
5377	  else
5378	    INSN_CODE (insn) = -1;
5379
	  /* Do not bother deleting any unreachable code; let jump do it.  */
5381	  cse_jumps_altered = true;
5382	  sets[i].rtl = 0;
5383	}
5384
5385      /* If destination is volatile, invalidate it and then do no further
5386	 processing for this assignment.  */
5387
5388      else if (do_not_record)
5389	{
5390	  if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5391	    invalidate (dest, VOIDmode);
5392	  else if (MEM_P (dest))
5393	    invalidate (dest, VOIDmode);
5394	  else if (GET_CODE (dest) == STRICT_LOW_PART
5395		   || GET_CODE (dest) == ZERO_EXTRACT)
5396	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5397	  sets[i].rtl = 0;
5398	}
5399
5400      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5401	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5402
5403#ifdef HAVE_cc0
5404      /* If setting CC0, record what it was set to, or a constant, if it
5405	 is equivalent to a constant.  If it is being set to a floating-point
5406	 value, make a COMPARE with the appropriate constant of 0.  If we
5407	 don't do this, later code can interpret this as a test against
5408	 const0_rtx, which can cause problems if we try to put it into an
5409	 insn as a floating-point operand.  */
5410      if (dest == cc0_rtx)
5411	{
5412	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5413	  this_insn_cc0_mode = mode;
5414	  if (FLOAT_MODE_P (mode))
5415	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5416					     CONST0_RTX (mode));
5417	}
5418#endif
5419    }
5420
5421  /* Now enter all non-volatile source expressions in the hash table
5422     if they are not already present.
5423     Record their equivalence classes in src_elt.
5424     This way we can insert the corresponding destinations into
5425     the same classes even if the actual sources are no longer in them
5426     (having been invalidated).  */
5427
5428  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5429      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5430    {
5431      struct table_elt *elt;
5432      struct table_elt *classp = sets[0].src_elt;
5433      rtx dest = SET_DEST (sets[0].rtl);
5434      enum machine_mode eqvmode = GET_MODE (dest);
5435
5436      if (GET_CODE (dest) == STRICT_LOW_PART)
5437	{
5438	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5439	  classp = 0;
5440	}
5441      if (insert_regs (src_eqv, classp, 0))
5442	{
5443	  rehash_using_reg (src_eqv);
5444	  src_eqv_hash = HASH (src_eqv, eqvmode);
5445	}
5446      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5447      elt->in_memory = src_eqv_in_memory;
5448      src_eqv_elt = elt;
5449
5450      /* Check to see if src_eqv_elt is the same as a set source which
5451	 does not yet have an elt, and if so set the elt of the set source
5452	 to src_eqv_elt.  */
5453      for (i = 0; i < n_sets; i++)
5454	if (sets[i].rtl && sets[i].src_elt == 0
5455	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5456	  sets[i].src_elt = src_eqv_elt;
5457    }
5458
5459  for (i = 0; i < n_sets; i++)
5460    if (sets[i].rtl && ! sets[i].src_volatile
5461	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5462      {
5463	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5464	  {
5465	    /* REG_EQUAL in setting a STRICT_LOW_PART
5466	       gives an equivalent for the entire destination register,
5467	       not just for the subreg being stored in now.
5468	       This is a more interesting equivalence, so we arrange later
5469	       to treat the entire reg as the destination.  */
5470	    sets[i].src_elt = src_eqv_elt;
5471	    sets[i].src_hash = src_eqv_hash;
5472	  }
5473	else
5474	  {
5475	    /* Insert source and constant equivalent into hash table, if not
5476	       already present.  */
5477	    struct table_elt *classp = src_eqv_elt;
5478	    rtx src = sets[i].src;
5479	    rtx dest = SET_DEST (sets[i].rtl);
5480	    enum machine_mode mode
5481	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5482
5483	    /* It's possible that we have a source value known to be
5484	       constant but don't have a REG_EQUAL note on the insn.
5485	       Lack of a note will mean src_eqv_elt will be NULL.  This
5486	       can happen where we've generated a SUBREG to access a
5487	       CONST_INT that is already in a register in a wider mode.
5488	       Ensure that the source expression is put in the proper
5489	       constant class.  */
5490	    if (!classp)
5491	      classp = sets[i].src_const_elt;
5492
5493	    if (sets[i].src_elt == 0)
5494	      {
5495		struct table_elt *elt;
5496
5497		/* Note that these insert_regs calls cannot remove
5498		   any of the src_elt's, because they would have failed to
5499		   match if not still valid.  */
5500		if (insert_regs (src, classp, 0))
5501		  {
5502		    rehash_using_reg (src);
5503		    sets[i].src_hash = HASH (src, mode);
5504		  }
5505		elt = insert (src, classp, sets[i].src_hash, mode);
5506		elt->in_memory = sets[i].src_in_memory;
5507		sets[i].src_elt = classp = elt;
5508	      }
5509	    if (sets[i].src_const && sets[i].src_const_elt == 0
5510		&& src != sets[i].src_const
5511		&& ! rtx_equal_p (sets[i].src_const, src))
5512	      sets[i].src_elt = insert (sets[i].src_const, classp,
5513					sets[i].src_const_hash, mode);
5514	  }
5515      }
5516    else if (sets[i].src_elt == 0)
5517      /* If we did not insert the source into the hash table (e.g., it was
5518	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5519	 so that the destination goes into that class.  */
5520      sets[i].src_elt = src_eqv_elt;
5521
5522  /* Record destination addresses in the hash table.  This allows us to
5523     check if they are invalidated by other sets.  */
5524  for (i = 0; i < n_sets; i++)
5525    {
5526      if (sets[i].rtl)
5527	{
5528	  rtx x = sets[i].inner_dest;
5529	  struct table_elt *elt;
5530	  enum machine_mode mode;
5531	  unsigned hash;
5532
5533	  if (MEM_P (x))
5534	    {
5535	      x = XEXP (x, 0);
5536	      mode = GET_MODE (x);
5537	      hash = HASH (x, mode);
5538	      elt = lookup (x, hash, mode);
5539	      if (!elt)
5540		{
5541		  if (insert_regs (x, NULL, 0))
5542		    {
5543		      rtx dest = SET_DEST (sets[i].rtl);
5544
5545		      rehash_using_reg (x);
5546		      hash = HASH (x, mode);
5547		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5548		    }
5549		  elt = insert (x, NULL, hash, mode);
5550		}
5551
5552	      sets[i].dest_addr_elt = elt;
5553	    }
5554	  else
5555	    sets[i].dest_addr_elt = NULL;
5556	}
5557    }
5558
5559  invalidate_from_clobbers (x);
5560
5561  /* Some registers are invalidated by subroutine calls.  Memory is
5562     invalidated by non-constant calls.  */
5563
5564  if (CALL_P (insn))
5565    {
5566      if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5567	invalidate_memory ();
5568      invalidate_for_call ();
5569    }
5570
5571  /* Now invalidate everything set by this instruction.
5572     If a SUBREG or other funny destination is being set,
5573     sets[i].rtl is still nonzero, so here we invalidate the reg
5574     a part of which is being set.  */
5575
5576  for (i = 0; i < n_sets; i++)
5577    if (sets[i].rtl)
5578      {
5579	/* We can't use the inner dest, because the mode associated with
5580	   a ZERO_EXTRACT is significant.  */
5581	rtx dest = SET_DEST (sets[i].rtl);
5582
5583	/* Needed for registers to remove the register from its
5584	   previous quantity's chain.
5585	   Needed for memory if this is a nonvarying address, unless
5586	   we have just done an invalidate_memory that covers even those.  */
5587	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5588	  invalidate (dest, VOIDmode);
5589	else if (MEM_P (dest))
5590	  invalidate (dest, VOIDmode);
5591	else if (GET_CODE (dest) == STRICT_LOW_PART
5592		 || GET_CODE (dest) == ZERO_EXTRACT)
5593	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5594      }
5595
5596  /* A volatile ASM invalidates everything.  */
5597  if (NONJUMP_INSN_P (insn)
5598      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5599      && MEM_VOLATILE_P (PATTERN (insn)))
5600    flush_hash_table ();
5601
  /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
5603     the regs restored by the longjmp come from a later time
5604     than the setjmp.  */
5605  if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5606    {
5607      flush_hash_table ();
5608      goto done;
5609    }
5610
5611  /* Make sure registers mentioned in destinations
5612     are safe for use in an expression to be inserted.
5613     This removes from the hash table
5614     any invalid entry that refers to one of these registers.
5615
5616     We don't care about the return value from mention_regs because
5617     we are going to hash the SET_DEST values unconditionally.  */
5618
5619  for (i = 0; i < n_sets; i++)
5620    {
5621      if (sets[i].rtl)
5622	{
5623	  rtx x = SET_DEST (sets[i].rtl);
5624
5625	  if (!REG_P (x))
5626	    mention_regs (x);
5627	  else
5628	    {
5629	      /* We used to rely on all references to a register becoming
5630		 inaccessible when a register changes to a new quantity,
5631		 since that changes the hash code.  However, that is not
5632		 safe, since after HASH_SIZE new quantities we get a
5633		 hash 'collision' of a register with its own invalid
5634		 entries.  And since SUBREGs have been changed not to
5635		 change their hash code with the hash code of the register,
5636		 it wouldn't work any longer at all.  So we have to check
5637		 for any invalid references lying around now.
5638		 This code is similar to the REG case in mention_regs,
5639		 but it knows that reg_tick has been incremented, and
		 it leaves reg_in_table as -1.  */
5641	      unsigned int regno = REGNO (x);
5642	      unsigned int endregno = END_REGNO (x);
5643	      unsigned int i;
5644
5645	      for (i = regno; i < endregno; i++)
5646		{
5647		  if (REG_IN_TABLE (i) >= 0)
5648		    {
5649		      remove_invalid_refs (i);
5650		      REG_IN_TABLE (i) = -1;
5651		    }
5652		}
5653	    }
5654	}
5655    }
5656
5657  /* We may have just removed some of the src_elt's from the hash table.
5658     So replace each one with the current head of the same class.
5659     Also check if destination addresses have been removed.  */
5660
5661  for (i = 0; i < n_sets; i++)
5662    if (sets[i].rtl)
5663      {
5664	if (sets[i].dest_addr_elt
5665	    && sets[i].dest_addr_elt->first_same_value == 0)
5666	  {
5667	    /* The elt was removed, which means this destination is not
5668	       valid after this instruction.  */
5669	    sets[i].rtl = NULL_RTX;
5670	  }
5671	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5672	  /* If elt was removed, find current head of same class,
5673	     or 0 if nothing remains of that class.  */
5674	  {
5675	    struct table_elt *elt = sets[i].src_elt;
5676
5677	    while (elt && elt->prev_same_value)
5678	      elt = elt->prev_same_value;
5679
5680	    while (elt && elt->first_same_value == 0)
5681	      elt = elt->next_same_value;
5682	    sets[i].src_elt = elt ? elt->first_same_value : 0;
5683	  }
5684      }
5685
5686  /* Now insert the destinations into their equivalence classes.  */
5687
5688  for (i = 0; i < n_sets; i++)
5689    if (sets[i].rtl)
5690      {
5691	rtx dest = SET_DEST (sets[i].rtl);
5692	struct table_elt *elt;
5693
5694	/* Don't record value if we are not supposed to risk allocating
5695	   floating-point values in registers that might be wider than
5696	   memory.  */
5697	if ((flag_float_store
5698	     && MEM_P (dest)
5699	     && FLOAT_MODE_P (GET_MODE (dest)))
	    /* Don't record BLKmode values, because we don't know their
	       size, and can't be sure that other BLKmode values have
	       the same or smaller size.  */
5703	    || GET_MODE (dest) == BLKmode
5704	    /* If we didn't put a REG_EQUAL value or a source into the hash
	       table, there is no point in recording DEST.  */
5706	    || sets[i].src_elt == 0
5707	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5708	       or SIGN_EXTEND, don't record DEST since it can cause
5709	       some tracking to be wrong.
5710
5711	       ??? Think about this more later.  */
5712	    || (GET_CODE (dest) == SUBREG
5713		&& (GET_MODE_SIZE (GET_MODE (dest))
5714		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5715		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
5716		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5717	  continue;
5718
5719	/* STRICT_LOW_PART isn't part of the value BEING set,
5720	   and neither is the SUBREG inside it.
5721	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
5722	if (GET_CODE (dest) == STRICT_LOW_PART)
5723	  dest = SUBREG_REG (XEXP (dest, 0));
5724
5725	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5726	  /* Registers must also be inserted into chains for quantities.  */
5727	  if (insert_regs (dest, sets[i].src_elt, 1))
5728	    {
5729	      /* If `insert_regs' changes something, the hash code must be
5730		 recalculated.  */
5731	      rehash_using_reg (dest);
5732	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5733	    }
5734
5735	elt = insert (dest, sets[i].src_elt,
5736		      sets[i].dest_hash, GET_MODE (dest));
5737
5738	/* If this is a constant, insert the constant anchors with the
5739	   equivalent register-offset expressions using register DEST.  */
5740	if (targetm.const_anchor
5741	    && REG_P (dest)
5742	    && SCALAR_INT_MODE_P (GET_MODE (dest))
5743	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5744	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5745
5746	elt->in_memory = (MEM_P (sets[i].inner_dest)
5747			  && !MEM_READONLY_P (sets[i].inner_dest));
5748
5749	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5750	   narrower than M2, and both M1 and M2 are the same number of words,
5751	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5752	   make that equivalence as well.
5753
5754	   However, BAR may have equivalences for which gen_lowpart
5755	   will produce a simpler value than gen_lowpart applied to
5756	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5757	   BAR's equivalences.  If we don't get a simplified form, make
5758	   the SUBREG.  It will not be used in an equivalence, but will
5759	   cause two similar assignments to be detected.
5760
5761	   Note the loop below will find SUBREG_REG (DEST) since we have
5762	   already entered SRC and DEST of the SET in the table.  */
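	/* For instance (hypothetical modes, UNITS_PER_WORD == 4): given
	   (set (subreg:SI (reg:HI 100) 0) (reg:SI 200)), SImode and
	   HImode each occupy one word, so on a little-endian target the
	   loop below also records (reg:HI 100) as equivalent to
	   (subreg:HI (reg:SI 200) 0), or to a simpler form if one can
	   be found.  */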
5763
5764	if (GET_CODE (dest) == SUBREG
5765	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5766		 / UNITS_PER_WORD)
5767		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5768	    && (GET_MODE_SIZE (GET_MODE (dest))
5769		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5770	    && sets[i].src_elt != 0)
5771	  {
5772	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5773	    struct table_elt *elt, *classp = 0;
5774
5775	    for (elt = sets[i].src_elt->first_same_value; elt;
5776		 elt = elt->next_same_value)
5777	      {
5778		rtx new_src = 0;
5779		unsigned src_hash;
5780		struct table_elt *src_elt;
5781		int byte = 0;
5782
5783		/* Ignore invalid entries.  */
5784		if (!REG_P (elt->exp)
5785		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5786		  continue;
5787
5788		/* We may have already been playing subreg games.  If the
5789		   mode is already correct for the destination, use it.  */
5790		if (GET_MODE (elt->exp) == new_mode)
5791		  new_src = elt->exp;
5792		else
5793		  {
5794		    /* Calculate big endian correction for the SUBREG_BYTE.
5795		       We have already checked that M1 (GET_MODE (dest))
5796		       is not narrower than M2 (new_mode).  */
5797		    if (BYTES_BIG_ENDIAN)
5798		      byte = (GET_MODE_SIZE (GET_MODE (dest))
5799			      - GET_MODE_SIZE (new_mode));
5800
5801		    new_src = simplify_gen_subreg (new_mode, elt->exp,
5802					           GET_MODE (dest), byte);
5803		  }
5804
		/* The call to simplify_gen_subreg fails if the value
		   is VOIDmode and we can't do any simplification, e.g.
		   for EXPR_LISTs denoting function call results.
5808		   It is invalid to construct a SUBREG with a VOIDmode
5809		   SUBREG_REG, hence a zero new_src means we can't do
5810		   this substitution.  */
5811		if (! new_src)
5812		  continue;
5813
5814		src_hash = HASH (new_src, new_mode);
5815		src_elt = lookup (new_src, src_hash, new_mode);
5816
		/* Put the new source in the hash table if it isn't
		   there already.  */
5819		if (src_elt == 0)
5820		  {
5821		    if (insert_regs (new_src, classp, 0))
5822		      {
5823			rehash_using_reg (new_src);
5824			src_hash = HASH (new_src, new_mode);
5825		      }
5826		    src_elt = insert (new_src, classp, src_hash, new_mode);
5827		    src_elt->in_memory = elt->in_memory;
5828		  }
5829		else if (classp && classp != src_elt->first_same_value)
5830		  /* Show that two things that we've seen before are
5831		     actually the same.  */
5832		  merge_equiv_classes (src_elt, classp);
5833
5834		classp = src_elt->first_same_value;
5835		/* Ignore invalid entries.  */
5836		while (classp
5837		       && !REG_P (classp->exp)
5838		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5839		  classp = classp->next_same_value;
5840	      }
5841	  }
5842      }
5843
5844  /* Special handling for (set REG0 REG1) where REG0 is the
5845     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
5846     be used in the sequel, so (if easily done) change this insn to
5847     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5848     that computed their value.  Then REG1 will become a dead store
5849     and won't cloud the situation for later optimizations.
5850
5851     Do not make this change if REG1 is a hard register, because it will
5852     then be used in the sequel and we may be changing a two-operand insn
5853     into a three-operand insn.
5854
5855     Also do not do this if we are operating on a copy of INSN.  */
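  /* Illustration with hypothetical pseudos: if the previous insn is
     (set (reg 101) (plus (reg 1) (reg 2))) and this insn is
     (set (reg 100) (reg 101)) where reg 100 is the cheapest equivalent,
     the pair becomes (set (reg 100) (plus (reg 1) (reg 2))) followed by
     (set (reg 101) (reg 100)), and the latter will likely end up a dead
     store.  */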
5856
5857  if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
5858      && NEXT_INSN (PREV_INSN (insn)) == insn
5859      && REG_P (SET_SRC (sets[0].rtl))
5860      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5861      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5862    {
5863      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5864      struct qty_table_elem *src_ent = &qty_table[src_q];
5865
5866      if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5867	{
5868	  /* Scan for the previous nonnote insn, but stop at a basic
5869	     block boundary.  */
5870	  rtx prev = insn;
5871	  rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
5872	  do
5873	    {
5874	      prev = PREV_INSN (prev);
5875	    }
5876	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
5877
5878	  /* Do not swap the registers around if the previous instruction
5879	     attaches a REG_EQUIV note to REG1.
5880
5881	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
5882	     from the pseudo that originally shadowed an incoming argument
5883	     to another register.  Some uses of REG_EQUIV might rely on it
5884	     being attached to REG1 rather than REG2.
5885
5886	     This section previously turned the REG_EQUIV into a REG_EQUAL
5887	     note.  We cannot do that because REG_EQUIV may provide an
5888	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
5889	  if (NONJUMP_INSN_P (prev)
5890	      && GET_CODE (PATTERN (prev)) == SET
5891	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
5892	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
5893	    {
5894	      rtx dest = SET_DEST (sets[0].rtl);
5895	      rtx src = SET_SRC (sets[0].rtl);
5896	      rtx note;
5897
5898	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
5899	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
5900	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
5901	      apply_change_group ();
5902
5903	      /* If INSN has a REG_EQUAL note, and this note mentions
5904		 REG0, then we must delete it, because the value in
5905		 REG0 has changed.  If the note's value is REG1, we must
5906		 also delete it because that is now this insn's dest.  */
5907	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5908	      if (note != 0
5909		  && (reg_mentioned_p (dest, XEXP (note, 0))
5910		      || rtx_equal_p (src, XEXP (note, 0))))
5911		remove_note (insn, note);
5912	    }
5913	}
5914    }
5915
5916done:;
5917}
5918
5919/* Remove from the hash table all expressions that reference memory.  */
5920
5921static void
5922invalidate_memory (void)
5923{
5924  int i;
5925  struct table_elt *p, *next;
5926
5927  for (i = 0; i < HASH_SIZE; i++)
5928    for (p = table[i]; p; p = next)
5929      {
5930	next = p->next_same_hash;
5931	if (p->in_memory)
5932	  remove_from_table (p, i);
5933      }
5934}
5935
5936/* Perform invalidation on the basis of everything about an insn
5937   except for invalidating the actual places that are SET in it.
5938   This includes the places CLOBBERed, and anything that might
5939   alias with something that is SET or CLOBBERed.
5940
5941   X is the pattern of the insn.  */
5942
5943static void
5944invalidate_from_clobbers (rtx x)
5945{
5946  if (GET_CODE (x) == CLOBBER)
5947    {
5948      rtx ref = XEXP (x, 0);
5949      if (ref)
5950	{
5951	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
5952	      || MEM_P (ref))
5953	    invalidate (ref, VOIDmode);
5954	  else if (GET_CODE (ref) == STRICT_LOW_PART
5955		   || GET_CODE (ref) == ZERO_EXTRACT)
5956	    invalidate (XEXP (ref, 0), GET_MODE (ref));
5957	}
5958    }
5959  else if (GET_CODE (x) == PARALLEL)
5960    {
5961      int i;
5962      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5963	{
5964	  rtx y = XVECEXP (x, 0, i);
5965	  if (GET_CODE (y) == CLOBBER)
5966	    {
5967	      rtx ref = XEXP (y, 0);
5968	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
5969		  || MEM_P (ref))
5970		invalidate (ref, VOIDmode);
5971	      else if (GET_CODE (ref) == STRICT_LOW_PART
5972		       || GET_CODE (ref) == ZERO_EXTRACT)
5973		invalidate (XEXP (ref, 0), GET_MODE (ref));
5974	    }
5975	}
5976    }
5977}
5978
5979/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
5980   and replace any registers in them with either an equivalent constant
5981   or the canonical form of the register.  If we are inside an address,
5982   only do this if the address remains valid.
5983
5984   OBJECT is 0 except when within a MEM in which case it is the MEM.
5985
5986   Return the replacement for X.  */
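/* For example (hypothetical note contents): if a REG_EQUAL note holds
   (plus (reg:SI 100) (const_int 4)) and reg 100 is currently known to
   contain (const_int 8), the REG case below substitutes the constant
   and the note becomes (plus (const_int 8) (const_int 4)).  */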
5987
5988static rtx
5989cse_process_notes_1 (rtx x, rtx object, bool *changed)
5990{
5991  enum rtx_code code = GET_CODE (x);
5992  const char *fmt = GET_RTX_FORMAT (code);
5993  int i;
5994
5995  switch (code)
5996    {
5997    case CONST_INT:
5998    case CONST:
5999    case SYMBOL_REF:
6000    case LABEL_REF:
6001    case CONST_DOUBLE:
6002    case CONST_FIXED:
6003    case CONST_VECTOR:
6004    case PC:
6005    case CC0:
6006    case LO_SUM:
6007      return x;
6008
6009    case MEM:
6010      validate_change (x, &XEXP (x, 0),
6011		       cse_process_notes (XEXP (x, 0), x, changed), 0);
6012      return x;
6013
6014    case EXPR_LIST:
6015    case INSN_LIST:
6016      if (REG_NOTE_KIND (x) == REG_EQUAL)
6017	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6018      if (XEXP (x, 1))
6019	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6020      return x;
6021
6022    case SIGN_EXTEND:
6023    case ZERO_EXTEND:
6024    case SUBREG:
6025      {
6026	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6027	/* We don't substitute VOIDmode constants into these rtx,
6028	   since they would impede folding.  */
6029	if (GET_MODE (new_rtx) != VOIDmode)
6030	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6031	return x;
6032      }
6033
6034    case REG:
6035      i = REG_QTY (REGNO (x));
6036
6037      /* Return a constant or a constant register.  */
6038      if (REGNO_QTY_VALID_P (REGNO (x)))
6039	{
6040	  struct qty_table_elem *ent = &qty_table[i];
6041
6042	  if (ent->const_rtx != NULL_RTX
6043	      && (CONSTANT_P (ent->const_rtx)
6044		  || REG_P (ent->const_rtx)))
6045	    {
6046	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6047	      if (new_rtx)
6048		return copy_rtx (new_rtx);
6049	    }
6050	}
6051
6052      /* Otherwise, canonicalize this register.  */
6053      return canon_reg (x, NULL_RTX);
6054
6055    default:
6056      break;
6057    }
6058
6059  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6060    if (fmt[i] == 'e')
6061      validate_change (object, &XEXP (x, i),
6062		       cse_process_notes (XEXP (x, i), object, changed), 0);
6063
6064  return x;
6065}
6066
6067static rtx
6068cse_process_notes (rtx x, rtx object, bool *changed)
6069{
6070  rtx new_rtx = cse_process_notes_1 (x, object, changed);
6071  if (new_rtx != x)
6072    *changed = true;
6073  return new_rtx;
6074}
6075
6076
/* Find a path in the CFG, starting with FIRST_BB, to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data, which is used
   to describe the path.
6081   It is filled with a queue of basic blocks, starting with FIRST_BB
6082   and following a trace through the CFG.
6083
6084   If all paths starting at FIRST_BB have been followed, or no new path
6085   starting at FIRST_BB can be constructed, this function returns FALSE.
6086   Otherwise, DATA->path is filled and the function returns TRUE indicating
6087   that a path to follow was found.
6088
6089   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6090   block in the path will be FIRST_BB.  */
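/* A sketch of the search on a hypothetical CFG: if BB1 ends in a
   conditional jump whose branch target BB3 has a single predecessor,
   a first call can return the path BB1 -> BB3; a later call backtracks
   over the branch edge and returns BB1 -> BB2 (the fallthru successor)
   instead, provided BB2 also has a single predecessor and has not been
   visited yet.  */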
6091
6092static bool
6093cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6094	       int follow_jumps)
6095{
6096  basic_block bb;
6097  edge e;
6098  int path_size;
6099
6100  SET_BIT (cse_visited_basic_blocks, first_bb->index);
6101
6102  /* See if there is a previous path.  */
6103  path_size = data->path_size;
6104
  /* If there is a previous path, make sure it started with FIRST_BB.  */
6106  if (path_size)
6107    gcc_assert (data->path[0].bb == first_bb);
6108
  /* If there was only one basic block in the last path, clear the path
     and return, so that paths starting at another basic block can be tried.  */
6111  if (path_size == 1)
6112    {
6113      path_size = 0;
6114      goto done;
6115    }
6116
6117  /* If the path was empty from the beginning, construct a new path.  */
6118  if (path_size == 0)
6119    data->path[path_size++].bb = first_bb;
6120  else
6121    {
6122      /* Otherwise, path_size must be equal to or greater than 2, because
6123	 a previous path exists that is at least two basic blocks long.
6124
6125	 Update the previous branch path, if any.  If the last branch was
6126	 previously along the branch edge, take the fallthrough edge now.  */
6127      while (path_size >= 2)
6128	{
6129	  basic_block last_bb_in_path, previous_bb_in_path;
6130	  edge e;
6131
6132	  --path_size;
6133	  last_bb_in_path = data->path[path_size].bb;
6134	  previous_bb_in_path = data->path[path_size - 1].bb;
6135
6136	  /* If we previously followed a path along the branch edge, try
6137	     the fallthru edge now.  */
6138	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6139	      && any_condjump_p (BB_END (previous_bb_in_path))
6140	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6141	      && e == BRANCH_EDGE (previous_bb_in_path))
6142	    {
6143	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6144	      if (bb != EXIT_BLOCK_PTR
6145		  && single_pred_p (bb)
6146		  /* We used to assert here that we would only see blocks
6147		     that we have not visited yet.  But we may end up
6148		     visiting basic blocks twice if the CFG has changed
6149		     in this run of cse_main, because when the CFG changes
		     the topological sort of the CFG also changes.  A basic
		     block that previously had more than two predecessors
6152		     may now have a single predecessor, and become part of
6153		     a path that starts at another basic block.
6154
6155		     We still want to visit each basic block only once, so
6156		     halt the path here if we have already visited BB.  */
6157		  && !TEST_BIT (cse_visited_basic_blocks, bb->index))
6158		{
6159		  SET_BIT (cse_visited_basic_blocks, bb->index);
6160		  data->path[path_size++].bb = bb;
6161		  break;
6162		}
6163	    }
6164
6165	  data->path[path_size].bb = NULL;
6166	}
6167
6168      /* If only one block remains in the path, bail.  */
6169      if (path_size == 1)
6170	{
6171	  path_size = 0;
6172	  goto done;
6173	}
6174    }
6175
6176  /* Extend the path if possible.  */
6177  if (follow_jumps)
6178    {
6179      bb = data->path[path_size - 1].bb;
6180      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6181	{
6182	  if (single_succ_p (bb))
6183	    e = single_succ_edge (bb);
6184	  else if (EDGE_COUNT (bb->succs) == 2
6185		   && any_condjump_p (BB_END (bb)))
6186	    {
6187	      /* First try to follow the branch.  If that doesn't lead
6188		 to a useful path, follow the fallthru edge.  */
6189	      e = BRANCH_EDGE (bb);
6190	      if (!single_pred_p (e->dest))
6191		e = FALLTHRU_EDGE (bb);
6192	    }
6193	  else
6194	    e = NULL;
6195
6196	  if (e && e->dest != EXIT_BLOCK_PTR
6197	      && single_pred_p (e->dest)
6198	      /* Avoid visiting basic blocks twice.  The large comment
6199		 above explains why this can happen.  */
6200	      && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
6201	    {
6202	      basic_block bb2 = e->dest;
6203	      SET_BIT (cse_visited_basic_blocks, bb2->index);
6204	      data->path[path_size++].bb = bb2;
6205	      bb = bb2;
6206	    }
6207	  else
6208	    bb = NULL;
6209	}
6210    }
6211
6212done:
6213  data->path_size = path_size;
6214  return path_size != 0;
6215}
6216
6217/* Dump the path in DATA to file F.  NSETS is the number of sets
6218   in the path.  */
6219
6220static void
6221cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6222{
6223  int path_entry;
6224
6225  fprintf (f, ";; Following path with %d sets: ", nsets);
6226  for (path_entry = 0; path_entry < data->path_size; path_entry++)
6227    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
  fputc ('\n', f);
6229  fflush (f);
6230}
6231
6232
6233/* Return true if BB has exception handling successor edges.  */
6234
6235static bool
6236have_eh_succ_edges (basic_block bb)
6237{
6238  edge e;
6239  edge_iterator ei;
6240
6241  FOR_EACH_EDGE (e, ei, bb->succs)
6242    if (e->flags & EDGE_EH)
6243      return true;
6244
6245  return false;
6246}
6247
6248
/* Scan to the end of the path described by DATA.  Record in DATA->nsets
   an estimate of the total number of SETs in all the insns in the path.  */
6251
6252static void
6253cse_prescan_path (struct cse_basic_block_data *data)
6254{
6255  int nsets = 0;
6256  int path_size = data->path_size;
6257  int path_entry;
6258
6259  /* Scan to end of each basic block in the path.  */
6260  for (path_entry = 0; path_entry < path_size; path_entry++)
6261    {
6262      basic_block bb;
6263      rtx insn;
6264
6265      bb = data->path[path_entry].bb;
6266
6267      FOR_BB_INSNS (bb, insn)
6268	{
6269	  if (!INSN_P (insn))
6270	    continue;
6271
6272	  /* A PARALLEL can have lots of SETs in it,
6273	     especially if it is really an ASM_OPERANDS.  */
6274	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
6275	    nsets += XVECLEN (PATTERN (insn), 0);
6276	  else
6277	    nsets += 1;
6278	}
6279    }
6280
6281  data->nsets = nsets;
6282}
6283
6284/* Process a single extended basic block described by EBB_DATA.  */
6285
6286static void
6287cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6288{
6289  int path_size = ebb_data->path_size;
6290  int path_entry;
6291  int num_insns = 0;
6292
6293  /* Allocate the space needed by qty_table.  */
6294  qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6295
6296  new_basic_block ();
6297  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6298  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6299  for (path_entry = 0; path_entry < path_size; path_entry++)
6300    {
6301      basic_block bb;
6302      rtx insn;
6303
6304      bb = ebb_data->path[path_entry].bb;
6305
6306      /* Invalidate recorded information for eh regs if there is an EH
6307	 edge pointing to that bb.  */
6308      if (bb_has_eh_pred (bb))
6309	{
6310	  df_ref *def_rec;
6311
6312	  for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6313	    {
6314	      df_ref def = *def_rec;
6315	      if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6316		invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6317	    }
6318	}
6319
6320      FOR_BB_INSNS (bb, insn)
6321	{
6322	  optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
	  /* If we have processed more than PARAM_MAX_CSE_INSNS insns,
	     flush the hash table to avoid extreme quadratic behavior.
	     We must not include NOTEs in the count since there may be
	     more of them when generating
6326	     debugging information.  If we clear the table at different
6327	     times, code generated with -g -O might be different than code
6328	     generated with -O but not -g.
6329
6330	     FIXME: This is a real kludge and needs to be done some other
6331		    way.  */
6332	  if (NONDEBUG_INSN_P (insn)
6333	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6334	    {
6335	      flush_hash_table ();
6336	      num_insns = 0;
6337	    }
6338
6339	  if (INSN_P (insn))
6340	    {
6341	      /* Process notes first so we have all notes in canonical forms
6342		 when looking for duplicate operations.  */
6343	      if (REG_NOTES (insn))
6344		{
6345		  bool changed = false;
6346		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6347						        NULL_RTX, &changed);
6348		  if (changed)
6349		    df_notes_rescan (insn);
6350		}
6351
6352	      cse_insn (insn);
6353
6354	      /* If we haven't already found an insn where we added a LABEL_REF,
6355		 check this one.  */
6356	      if (INSN_P (insn) && !recorded_label_ref
6357		  && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6358				   (void *) insn))
6359		recorded_label_ref = true;
6360
6361#ifdef HAVE_cc0
6362	      if (NONDEBUG_INSN_P (insn))
6363		{
6364		  /* If the previous insn sets CC0 and this insn no
6365		     longer references CC0, delete the previous insn.
		     Here we use the fact that nothing expects CC0 to be
6367		     valid over an insn, which is true until the final
6368		     pass.  */
6369		  rtx prev_insn, tem;
6370
6371		  prev_insn = prev_nonnote_nondebug_insn (insn);
6372		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6373		      && (tem = single_set (prev_insn)) != NULL_RTX
6374		      && SET_DEST (tem) == cc0_rtx
6375		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6376		    delete_insn (prev_insn);
6377
6378		  /* If this insn is not the last insn in the basic
6379		     block, it will be PREV_INSN(insn) in the next
6380		     iteration.  If we recorded any CC0-related
6381		     information for this insn, remember it.  */
6382		  if (insn != BB_END (bb))
6383		    {
6384		      prev_insn_cc0 = this_insn_cc0;
6385		      prev_insn_cc0_mode = this_insn_cc0_mode;
6386		    }
6387		}
6388#endif
6389	    }
6390	}
6391
6392      /* With non-call exceptions, we are not always able to update
6393	 the CFG properly inside cse_insn.  So clean up possibly
6394	 redundant EH edges here.  */
6395      if (flag_non_call_exceptions && have_eh_succ_edges (bb))
6396	cse_cfg_altered |= purge_dead_edges (bb);
6397
6398      /* If we changed a conditional jump, we may have terminated
6399	 the path we are following.  Check that by verifying that
6400	 the edge we would take still exists.  If the edge does
6401	 not exist anymore, purge the remainder of the path.
6402	 Note that this will cause us to return to the caller.  */
6403      if (path_entry < path_size - 1)
6404	{
6405	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6406	  if (!find_edge (bb, next_bb))
6407	    {
6408	      do
6409		{
6410		  path_size--;
6411
6412		  /* If we truncate the path, we must also reset the
6413		     visited bit on the remaining blocks in the path,
6414		     or we will never visit them at all.  */
6415		  RESET_BIT (cse_visited_basic_blocks,
6416			     ebb_data->path[path_size].bb->index);
6417		  ebb_data->path[path_size].bb = NULL;
6418		}
6419	      while (path_size - 1 != path_entry);
6420	      ebb_data->path_size = path_size;
6421	    }
6422	}
6423
6424      /* If this is a conditional jump insn, record any known
6425	 equivalences due to the condition being tested.  */
6426      insn = BB_END (bb);
6427      if (path_entry < path_size - 1
6428	  && JUMP_P (insn)
6429	  && single_set (insn)
6430	  && any_condjump_p (insn))
6431	{
6432	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6433	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6434	  record_jump_equiv (insn, taken);
6435	}
6436
6437#ifdef HAVE_cc0
6438      /* Clear the CC0-tracking related insns, they can't provide
6439	 useful information across basic block boundaries.  */
6440      prev_insn_cc0 = 0;
6441#endif
6442    }
6443
6444  gcc_assert (next_qty <= max_qty);
6445
6446  free (qty_table);
6447}
6448
6449
6450/* Perform cse on the instructions of a function.
6451   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.
6453
6454   Return 2 if jump optimizations should be redone due to simplifications
6455   in conditional jump instructions.
6456   Return 1 if the CFG should be cleaned up because it has been modified.
6457   Return 0 otherwise.  */
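/* An illustrative sketch of how a caller might consume the return
   value (the exact pass wrapper may differ):

     int tem = cse_main (get_insns (), max_reg_num ());
     if (tem == 2)
       {
	 rebuild_jump_labels (get_insns ());
	 cleanup_cfg (0);
       }
     else if (tem == 1)
       cleanup_cfg (0);
*/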
6458
6459int
6460cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
6461{
6462  struct cse_basic_block_data ebb_data;
6463  basic_block bb;
6464  int *rc_order = XNEWVEC (int, last_basic_block);
6465  int i, n_blocks;
6466
6467  df_set_flags (DF_LR_RUN_DCE);
6468  df_analyze ();
6469  df_set_flags (DF_DEFER_INSN_RESCAN);
6470
6471  reg_scan (get_insns (), max_reg_num ());
6472  init_cse_reg_info (nregs);
6473
6474  ebb_data.path = XNEWVEC (struct branch_path,
6475			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6476
6477  cse_cfg_altered = false;
6478  cse_jumps_altered = false;
6479  recorded_label_ref = false;
6480  constant_pool_entries_cost = 0;
6481  constant_pool_entries_regcost = 0;
6482  ebb_data.path_size = 0;
6483  ebb_data.nsets = 0;
6484  rtl_hooks = cse_rtl_hooks;
6485
6486  init_recog ();
6487  init_alias_analysis ();
6488
6489  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6490
6491  /* Set up the table of already visited basic blocks.  */
6492  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
6493  sbitmap_zero (cse_visited_basic_blocks);
6494
6495  /* Loop over basic blocks in reverse completion order (RPO),
6496     excluding the ENTRY and EXIT blocks.  */
6497  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6498  i = 0;
6499  while (i < n_blocks)
6500    {
      /* Find the first block in the RPO queue that we have not
	 processed before.  */
6503      do
6504	{
6505	  bb = BASIC_BLOCK (rc_order[i++]);
6506	}
6507      while (TEST_BIT (cse_visited_basic_blocks, bb->index)
6508	     && i < n_blocks);
6509
6510      /* Find all paths starting with BB, and process them.  */
6511      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6512	{
6513	  /* Pre-scan the path.  */
6514	  cse_prescan_path (&ebb_data);
6515
	  /* If this path has no sets, skip it.  */
6517	  if (ebb_data.nsets == 0)
6518	    continue;
6519
6520	  /* Get a reasonable estimate for the maximum number of qty's
6521	     needed for this path.  For this, we take the number of sets
6522	     and multiply that by MAX_RECOG_OPERANDS.  */
6523	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6524
6525	  /* Dump the path we're about to process.  */
6526	  if (dump_file)
6527	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6528
6529	  cse_extended_basic_block (&ebb_data);
6530	}
6531    }
6532
6533  /* Clean up.  */
6534  end_alias_analysis ();
6535  free (reg_eqv_table);
6536  free (ebb_data.path);
6537  sbitmap_free (cse_visited_basic_blocks);
6538  free (rc_order);
6539  rtl_hooks = general_rtl_hooks;
6540
6541  if (cse_jumps_altered || recorded_label_ref)
6542    return 2;
6543  else if (cse_cfg_altered)
6544    return 1;
6545  else
6546    return 0;
6547}

/* Called via for_each_rtx to see if an insn is using a LABEL_REF for
   which there isn't a REG_LABEL_OPERAND note.
   Return one if so.  DATA is the insn.  */

static int
check_for_label_ref (rtx *rtl, void *data)
{
  rtx insn = (rtx) data;

  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  return (GET_CODE (*rtl) == LABEL_REF
	  && ! LABEL_REF_NONLOCAL_P (*rtl)
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
	  && LABEL_P (XEXP (*rtl, 0))
	  && INSN_UID (XEXP (*rtl, 0)) != 0
	  && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
}
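
/* A usage sketch (hedged -- the exact call site may differ): the result
   is accumulated into recorded_label_ref so that cse_main can request a
   rerun of the jump pass, e.g.

       if (!recorded_label_ref
	   && for_each_rtx (&PATTERN (insn), check_for_label_ref,
			    (void *) insn))
	 recorded_label_ref = true;  */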

/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, which means that we must count
   uses of a SET_DEST regardless because the insn can't be deleted here.  */
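
/* An illustrative (made-up) example: given

       (set (reg 100) (plus (reg 100) (reg 101)))

   and INCR == 1, only COUNTS[101] is incremented.  The use of reg 100
   inside the SET_SRC is a use of DEST, and deleting the insn would
   remove that use along with the set, so it is not counted.  (REG
   rtxes for a given pseudo are normally shared, which is why the
   plain X != DEST pointer comparison below is sufficient.)  */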

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case DEBUG_INSN:
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may trap, mark
         this fact by setting DEST to pc_rtx.  */
      if (insn_could_throw_p (x))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead, since the loop
	 optimizer may try to use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* If the asm is volatile, then this insn cannot be deleted,
	 and so the inputs *must* be live.  */
      if (MEM_VOLATILE_P (x))
	dest = NULL_RTX;
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    case INSN_LIST:
      gcc_unreachable ();

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}

/* Return true if a register is dead.  Can be used in for_each_rtx.  */

static int
is_dead_reg (rtx *loc, void *data)
{
  rtx x = *loc;
  int *counts = (int *)data;

  return (REG_P (x)
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && counts[REGNO (x)] == 0);
}
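
/* Note: is_dead_reg is used both directly (see set_live_p below) and
   as a for_each_rtx callback in insn_live_p, where it detects stale
   references to dead pseudos in debug insns.  */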

/* Return true if SET is live.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (!is_dead_reg (&SET_DEST (set), counts)
	   || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}

/* Return true if INSN is live.  */

static bool
insn_live_p (rtx insn, int *counts)
{
  int i;
  if (insn_could_throw_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx next;

      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
	if (NOTE_P (next))
	  continue;
	else if (!DEBUG_INSN_P (next))
	  return true;
	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
	  return false;

      /* If this debug insn references a dead register, drop the
	 location expression for now.  ??? We could try to find the
	 def and see if propagation is possible.  */
      if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn), is_dead_reg, counts))
	{
	  INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	  df_insn_rescan (insn);
	}

      return true;
    }
  else
    return true;
}

/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, the loop
   optimizer, or other passes.  It improves the heuristics in the loop
   optimizer, which then won't try to move dead invariants out of loops
   or create givs (general induction variables) for dead quantities.  The
   remaining passes of the compilation are also sped up.  */
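
/* Two typical victims (register numbers made up for illustration),
   assuming the insns cannot trap and reg 117's use count is zero:

       (set (reg 117) (plus (reg 118) (reg 119)))	;; dead store
       (set (reg 65) (reg 65))				;; no-op self-copy

   The first is dead because is_dead_reg holds for its SET_DEST; the
   second because set_noop_p holds for the whole SET.  */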

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = XCNEWVEC (int, nreg);
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn_and_edges (insn);
	  ndead++;
	}
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}

/* This function is called via for_each_rtx.  The argument, NEWREG, is
   a condition code register with the desired mode.  If we are looking
   at the same register in a different mode, replace it with
   NEWREG.  */

static int
cse_change_cc_mode (rtx *loc, void *data)
{
  struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;

  if (*loc
      && REG_P (*loc)
      && REGNO (*loc) == REGNO (args->newreg)
      && GET_MODE (*loc) != GET_MODE (args->newreg))
    {
      validate_change (args->insn, loc, args->newreg, 1);

      return -1;
    }
  return 0;
}

/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx insn, rtx newreg)
{
  struct change_cc_mode_args args;
  int success;

  if (!INSN_P (insn))
    return;

  args.insn = insn;
  args.newreg = newreg;

  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);

  /* If the following assertion fails, there is most probably something
     wrong with the cc_modes_compatible back-end function.  CC modes can
     only be considered compatible if the insn - with the mode replaced
     by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}

/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}

/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */
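
/* A sketch of the basic case (register numbers made up): BB ends with

       (set (reg:CC 17) (compare:CC (reg 100) (reg 101)))
       (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0)) ...))

   and a successor with no other predecessors recomputes

       (set (reg:CC 17) (compare:CC (reg 100) (reg 101)))

   before anything modifies the inputs; that second comparison is
   redundant and is deleted below.  */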

static enum machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}

/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */
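
/* A hedged example of the mode issue this handles: the target's
   cc_modes_compatible hook may report that, say, a zero-flag-only CC
   mode and a full-comparison CC mode can be served by one comparison.
   If a block computes the comparison in one mode and a duplicate in a
   successor uses the other, cse_cc_succs settles on the common mode
   and cse_change_cc_mode_insns rewrites the surviving references to
   the CC register accordingly.  */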

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}


/* Perform common subexpression elimination.  A nonzero return value
   from `cse_main' means that jumps were simplified or the CFG was
   modified and some code may now be unreachable, so run the cleanups
   below afterwards.  */
static bool
gate_handle_cse (void)
{
  return optimize > 0;
}

static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cleanup_cfg (0);

  return 0;
}

struct rtl_opt_pass pass_cse =
{
 {
  RTL_PASS,
  "cse1",                               /* name */
  gate_handle_cse,                      /* gate */
  rest_of_handle_cse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow,                     /* todo_flags_finish */
 }
};
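
/* pass_cse and the related descriptors below are not invoked from this
   file; the pass manager schedules them (presumably via the pass list
   assembled in init_optimization_passes in passes.c).  */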


static bool
gate_handle_cse2 (void)
{
  return optimize > 0 && flag_rerun_cse_after_loop;
}

/* Run second CSE pass after loop optimizations.  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because
     eliminating those assignments would make it harder for that pass
     to determine whether a jump can be bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}


struct rtl_opt_pass pass_cse2 =
{
 {
  RTL_PASS,
  "cse2",                               /* name */
  gate_handle_cse2,                     /* gate */
  rest_of_handle_cse2,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};

static bool
gate_handle_cse_after_global_opts (void)
{
  return optimize > 0 && flag_rerun_cse_after_global_opts;
}

/* Run a local CSE pass after the global optimizations.  */
static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  flag_cse_follow_jumps = save_cfj;
  return 0;
}

struct rtl_opt_pass pass_cse_after_global_opts =
{
 {
  RTL_PASS,
  "cse_local",                          /* name */
  gate_handle_cse_after_global_opts,    /* gate */
  rest_of_handle_cse_after_global_opts, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};