/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, reg_qty[N] will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the quantity's `mode' must be in the hash table for both registers
   and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
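
/* Illustration only: a minimal sketch of the quantity-number encoding
   described above.  A register N that has not been assigned a quantity
   has reg_qty[N] == -N - 1, so "has a quantity" reduces to a sign test
   (this is exactly what REGNO_QTY_VALID_P, defined further down, does).
   The helper below is hypothetical and compiled out.  */
#if 0
static int
example_reg_has_quantity (unsigned int regno)
{
  /* REG_QTY (regno) stays at -regno - 1 until make_new_qty assigns a
     real, nonnegative quantity number.  */
  return REG_QTY (regno) >= 0;
}
#endif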

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
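
/* Illustration only: a hedged sketch of reading a quantity's recorded
   state back out of qty_table, using the fields documented above.  A
   quantity with a known (or nearly constant) value has a nonzero
   const_rtx; a recorded true comparison is signaled by comparison_code
   != UNKNOWN.  Hypothetical helper, compiled out.  */
#if 0
static rtx
example_qty_constant (int q)
{
  struct qty_table_elem *ent = &qty_table[q];

  /* const_rtx is zero unless the quantity's value is known; when it is
     set, const_insn names the insn that established it.  */
  return ent->const_rtx;
}
#endif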

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;
#endif

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) is negative (register N has no quantity),
   reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
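
/* Illustration only: walking one quantity's register chain as described
   in the header comment, from the least-local register at first_reg to
   the end of the chain (next == -1).  Hypothetical debug helper,
   compiled out.  */
#if 0
static void
example_walk_qty_chain (int q)
{
  int r;

  for (r = qty_table[q].first_reg; r != -1; r = reg_eqv_table[r].next)
    fprintf (stderr, " reg %d", r);
  fprintf (stderr, "\n");
}
#endif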

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
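
/* Illustration only: the bucket walk that REGHASH_FN implies.  This is
   the lookup half of get_cse_reg_info (defined below), without the
   allocation and caching.  Hypothetical, compiled out.  */
#if 0
static struct cse_reg_info *
example_find_reg_info (unsigned int regno)
{
  struct cse_reg_info *p;

  for (p = reg_hash[REGHASH_FN (regno)]; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      return p;

  return NULL;
}
#endif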

/* The last lookup we did into the cse_reg_info hash table.  This
   allows us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL note; if so, we have to rerun jump after CSE to
   put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   The elements of each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
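
/* Illustration only: the lookup pattern the HASH macro is built for.
   An expression must be hashed with the same mode on insertion and
   retrieval; pseudo registers hash through their quantity number, so
   equivalent pseudos collide as intended.  `lookup' is defined later
   in this file; this wrapper is hypothetical and compiled out.  */
#if 0
static struct table_elt *
example_find (rtx x, enum machine_mode mode)
{
  /* canon_hash (via HASH) may set do_not_record or hash_arg_in_memory
     as side effects; real callers inspect those flags.  */
  return lookup (x, HASH (x, mode), mode);
}
#endif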

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
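
/* Illustration only: how the COST macro orders candidates.  Plain
   registers cost 0 and everything else goes through notreg_cost (and
   ultimately rtx_cost); the full ordering used by the table also
   consults approx_reg_cost via `preferrable', both defined below.
   Hypothetical, compiled out.  */
#if 0
static rtx
example_cheaper (rtx a, rtx b)
{
  return COST (a) <= COST (b) ? a : b;
}
#endif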

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as TAKEN,
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } *path;
};
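
/* Illustration only: consuming a filled-in cse_basic_block_data by
   walking its recorded branch path, as cse_basic_block does.
   Hypothetical debug helper, compiled out.  */
#if 0
static void
example_dump_path (const struct cse_basic_block_data *data)
{
  int i;

  for (i = 0; i < data->path_size; i++)
    fprintf (stderr, "branch insn %d: %s\n",
	     INSN_UID (data->path[i].branch),
	     data->path[i].status == NOT_TAKEN ? "not taken" : "taken");
}
#endif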

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferrable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);
static unsigned canon_hash (rtx, enum machine_mode);
static unsigned canon_hash_string (const char *);
static unsigned safe_hash (rtx, enum machine_mode);
static int exp_equiv_p (rtx, rtx, int, int);
static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx, rtx);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void cse_around_loop (rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static void cse_check_loop_start (rtx, rtx, void *);
static void cse_set_around_loop (rtx, rtx, rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
static void count_reg_usage (rtx, int *, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt *);
static struct cse_reg_info *get_cse_reg_info (unsigned int);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    case ADDRESSOF:
      return true;

    default:
      return false;
    }
}
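
/* Illustration only: the kinds of addresses fixed_base_plus_p accepts.
   A fixed base plus a constant offset qualifies; a base in an ordinary
   pseudo does not.  The register number used for the pseudo is an
   arbitrary assumption.  Hypothetical, compiled out.  */
#if 0
static void
example_fixed_base (void)
{
  rtx fp_off = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (8));
  rtx pseudo_off = gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode,
					      LAST_VIRTUAL_REGISTER + 1),
				 GEN_INT (8));

  if (! fixed_base_plus_p (fp_off) || fixed_base_plus_p (pseudo_off))
    abort ();
}
#endif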

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferrable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
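
/* Illustration only: preferrable's contract on a couple of concrete
   inputs.  Full rtx costs dominate; register costs only break ties;
   MAX_COST poisons either axis.  Hypothetical, compiled out.  */
#if 0
static void
example_preferrable (void)
{
  /* A cheaper full cost wins even against a better register cost.  */
  if (preferrable (1, 5, 2, 0) >= 0)
    abort ();

  /* With equal full costs, the lower register cost wins.  */
  if (preferrable (1, 5, 1, 0) <= 0)
    abort ();
}
#endif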

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if ((*targetm.rtx_costs) (x, code, outer_code, &total))
	return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
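
/* Illustration only: rtx_cost's recursive accounting on a simple
   address-like expression.  A (plus (reg) (const_int)) costs the PLUS's
   own COSTS_N_INSNS (1) default plus whatever the operands cost (the
   REG contributes 0), unless the target hook claims the whole
   expression first.  Hypothetical, compiled out.  */
#if 0
static int
example_plus_cost (rtx reg, HOST_WIDE_INT offset)
{
  return rtx_cost (gen_rtx_PLUS (GET_MODE (reg), reg, GEN_INT (offset)),
		   SET);
}
#endif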

/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* The address_cost target hook does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for cost of various unusual addresses, such as operands
     of push instructions.  It is not worthwhile to complicate writing
     of the target hook by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return (*targetm.address_cost) (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}

static struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->subreg_ticked = -1;
      p->reg_qty = -regno - 1;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Clear out hash table state for this pass.  */

  memset (reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE that was not
   in any register before, and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}
1491
1492/* Insert X in the hash table, assuming HASH is its hash code
1493   and CLASSP is an element of the class it should go in
1494   (or 0 if a new class should be made).
1495   It is inserted at the proper position to keep the class in
1496   the order cheapest first.
1497
1498   MODE is the machine-mode of X, or if X is an integer constant
1499   with VOIDmode then MODE is the mode with which X will be used.
1500
1501   For elements of equal cheapness, the most recent one
1502   goes in front, except that the first element in the list
1503   remains first unless a cheaper element is added.  The order of
1504   pseudo-registers does not matter, as canon_reg will be called to
1505   find the cheapest when a register is retrieved from the table.
1506
1507   The in_memory field in the hash table element is set to 0.
1508   The caller must set it nonzero if appropriate.
1509
1510   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1511   and if insert_regs returns a nonzero value
1512   you must then recompute its hash code before calling here.
1513
1514   If necessary, update table showing constant values of quantities.  */
1515
1516#define CHEAPER(X, Y) \
1517 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1518
1519static struct table_elt *
1520insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1521{
1522  struct table_elt *elt;
1523
1524  /* If X is a register and we haven't made a quantity for it,
1525     something is wrong.  */
1526  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1527    abort ();
1528
1529  /* If X is a hard register, show it is being put in the table.  */
1530  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1531    {
1532      unsigned int regno = REGNO (x);
1533      unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1534      unsigned int i;
1535
1536      for (i = regno; i < endregno; i++)
1537	SET_HARD_REG_BIT (hard_regs_in_table, i);
1538    }
1539
1540  /* Put an element for X into the right hash bucket.  */
1541
1542  elt = free_element_chain;
1543  if (elt)
1544    free_element_chain = elt->next_same_hash;
1545  else
1546    {
1547      n_elements_made++;
1548      elt = xmalloc (sizeof (struct table_elt));
1549    }
1550
1551  elt->exp = x;
1552  elt->canon_exp = NULL_RTX;
1553  elt->cost = COST (x);
1554  elt->regcost = approx_reg_cost (x);
1555  elt->next_same_value = 0;
1556  elt->prev_same_value = 0;
1557  elt->next_same_hash = table[hash];
1558  elt->prev_same_hash = 0;
1559  elt->related_value = 0;
1560  elt->in_memory = 0;
1561  elt->mode = mode;
1562  elt->is_const = (CONSTANT_P (x)
1563		   /* GNU C++ takes advantage of this for `this'
1564		      (and other const values).  */
1565		   || (GET_CODE (x) == REG
1566		       && RTX_UNCHANGING_P (x)
1567		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1568		   || fixed_base_plus_p (x));
1569
1570  if (table[hash])
1571    table[hash]->prev_same_hash = elt;
1572  table[hash] = elt;
1573
1574  /* Put it into the proper value-class.  */
1575  if (classp)
1576    {
1577      classp = classp->first_same_value;
1578      if (CHEAPER (elt, classp))
1579	/* Insert at the head of the class.  */
1580	{
1581	  struct table_elt *p;
1582	  elt->next_same_value = classp;
1583	  classp->prev_same_value = elt;
1584	  elt->first_same_value = elt;
1585
1586	  for (p = classp; p; p = p->next_same_value)
1587	    p->first_same_value = elt;
1588	}
1589      else
1590	{
1591	  /* Insert not at head of the class.  */
1592	  /* Put it after the last element cheaper than X.  */
1593	  struct table_elt *p, *next;
1594
1595	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1596	       p = next);
1597
1598	  /* Put it after P and before NEXT.  */
1599	  elt->next_same_value = next;
1600	  if (next)
1601	    next->prev_same_value = elt;
1602
1603	  elt->prev_same_value = p;
1604	  p->next_same_value = elt;
1605	  elt->first_same_value = classp;
1606	}
1607    }
1608  else
1609    elt->first_same_value = elt;
1610
1611  /* If this is a constant being set equivalent to a register or a register
1612     being set equivalent to a constant, note the constant equivalence.
1613
1614     If this is a constant, it cannot be equivalent to a different constant,
1615     and a constant is the only thing that can be cheaper than a register.  So
1616     we know the register is the head of the class (before the constant was
1617     inserted).
1618
1619     If this is a register that is not already known equivalent to a
1620     constant, we must check the entire class.
1621
1622     If this is a register that is already known equivalent to a constant,
1623     update the qty's `const_insn' to show that `this_insn' is the latest
1624     insn making that quantity equivalent to the constant.  */
1625
1626  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1627      && GET_CODE (x) != REG)
1628    {
1629      int exp_q = REG_QTY (REGNO (classp->exp));
1630      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1631
1632      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1633      exp_ent->const_insn = this_insn;
1634    }
1635
1636  else if (GET_CODE (x) == REG
1637	   && classp
1638	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1639	   && ! elt->is_const)
1640    {
1641      struct table_elt *p;
1642
1643      for (p = classp; p != 0; p = p->next_same_value)
1644	{
1645	  if (p->is_const && GET_CODE (p->exp) != REG)
1646	    {
1647	      int x_q = REG_QTY (REGNO (x));
1648	      struct qty_table_elem *x_ent = &qty_table[x_q];
1649
1650	      x_ent->const_rtx
1651		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
1652	      x_ent->const_insn = this_insn;
1653	      break;
1654	    }
1655	}
1656    }
1657
1658  else if (GET_CODE (x) == REG
1659	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1660	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1661    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1662
1663  /* If this is a constant with symbolic value,
1664     and it has a term with an explicit integer value,
1665     link it up with related expressions.  */
1666  if (GET_CODE (x) == CONST)
1667    {
1668      rtx subexp = get_related_value (x);
1669      unsigned subhash;
1670      struct table_elt *subelt, *subelt_prev;
1671
1672      if (subexp != 0)
1673	{
1674	  /* Get the integer-free subexpression in the hash table.  */
1675	  subhash = safe_hash (subexp, mode) & HASH_MASK;
1676	  subelt = lookup (subexp, subhash, mode);
1677	  if (subelt == 0)
1678	    subelt = insert (subexp, NULL, subhash, mode);
1679	  /* Initialize SUBELT's circular chain if it has none.  */
1680	  if (subelt->related_value == 0)
1681	    subelt->related_value = subelt;
1682	  /* Find the element in the circular chain that precedes SUBELT.  */
1683	  subelt_prev = subelt;
1684	  while (subelt_prev->related_value != subelt)
1685	    subelt_prev = subelt_prev->related_value;
1686	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1687	     This way the element that follows SUBELT is the oldest one.  */
1688	  elt->related_value = subelt_prev->related_value;
1689	  subelt_prev->related_value = elt;
1690	}
1691    }
1692
1693  return elt;
1694}
1695
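/* For example, if a class currently holds {A, B} with A at the head and
   a CHEAPER element E is inserted, the value chain becomes

	E <-> A <-> B		(via next/prev_same_value)

   and the first_same_value fields of E, A and B all point at E, so any
   member reaches the cheapest equivalent expression in one step.  If E
   is not cheaper than A, it is spliced in after the last element that
   is cheaper than E, and first_same_value remains A throughout.  */
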
1696/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1697   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1698   the two classes equivalent.
1699
1700   CLASS1 will be the surviving class; CLASS2 should not be used after this
1701   call.
1702
1703   Any invalid entries in CLASS2 will not be copied.  */
1704
1705static void
1706merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1707{
1708  struct table_elt *elt, *next, *new;
1709
1710  /* Ensure we start with the head of the classes.  */
1711  class1 = class1->first_same_value;
1712  class2 = class2->first_same_value;
1713
1714  /* If they were already equal, forget it.  */
1715  if (class1 == class2)
1716    return;
1717
1718  for (elt = class2; elt; elt = next)
1719    {
1720      unsigned int hash;
1721      rtx exp = elt->exp;
1722      enum machine_mode mode = elt->mode;
1723
1724      next = elt->next_same_value;
1725
1726      /* Remove old entry, make a new one in CLASS1's class.
1727	 Don't do this for invalid entries as we cannot find their
1728	 hash code (it also isn't necessary).  */
1729      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1730	{
1731	  bool need_rehash = false;
1732
1733	  hash_arg_in_memory = 0;
1734	  hash = HASH (exp, mode);
1735
1736	  if (GET_CODE (exp) == REG)
1737	    {
1738	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1739	      delete_reg_equiv (REGNO (exp));
1740	    }
1741
1742	  remove_from_table (elt, hash);
1743
1744	  if (insert_regs (exp, class1, 0) || need_rehash)
1745	    {
1746	      rehash_using_reg (exp);
1747	      hash = HASH (exp, mode);
1748	    }
1749	  new = insert (exp, class1, hash, mode);
1750	  new->in_memory = hash_arg_in_memory;
1751	}
1752    }
1753}
1754
1755/* Flush the entire hash table.  */
1756
1757static void
1758flush_hash_table (void)
1759{
1760  int i;
1761  struct table_elt *p;
1762
1763  for (i = 0; i < HASH_SIZE; i++)
1764    for (p = table[i]; p; p = table[i])
1765      {
1766	/* Note that invalidate can remove elements
1767	   after P in the current hash chain.  */
1768	if (GET_CODE (p->exp) == REG)
1769	  invalidate (p->exp, p->mode);
1770	else
1771	  remove_from_table (p, i);
1772      }
1773}
1774
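/* Note that the inner loop above re-reads table[i] rather than caching
   p->next_same_hash: invalidate may unlink an arbitrary set of later
   elements in the same chain, so a saved successor could already have
   been freed.  A minimal standalone model of this drain pattern (all
   names below are hypothetical, for illustration only):  */

#if 0
struct node { struct node *next; int doomed; };

/* Stand-in for invalidate/remove_from_table: unlink the head, plus any
   immediately following nodes that are also doomed.  */
static void
remove_head (struct node **bucket)
{
  struct node *p = *bucket;

  *bucket = p->next;
  while (*bucket && (*bucket)->doomed)
    *bucket = (*bucket)->next;
}

/* Always restart from the bucket head; correct even though one removal
   can delete more than one node.  */
static void
drain (struct node **bucket)
{
  while (*bucket)
    remove_head (bucket);
}
#endif
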
1775/* Function called for each rtx to check whether a true dependence exists.  */
1776struct check_dependence_data
1777{
1778  enum machine_mode mode;
1779  rtx exp;
1780  rtx addr;
1781};
1782
1783static int
1784check_dependence (rtx *x, void *data)
1785{
1786  struct check_dependence_data *d = (struct check_dependence_data *) data;
1787  if (*x && GET_CODE (*x) == MEM)
1788    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1789				  cse_rtx_varies_p);
1790  else
1791    return 0;
1792}
1793
1794/* Remove from the hash table, or mark as invalid, all expressions whose
1795   values could be altered by storing in X.  X is a register, a subreg, or
1796   a memory reference with nonvarying address (because, when a memory
1797   reference with a varying address is stored in, all memory references are
1798   removed by invalidate_memory so specific invalidation is superfluous).
1799   FULL_MODE, if not VOIDmode, indicates that this much should be
1800   invalidated instead of just the amount indicated by the mode of X.  This
1801   is only used for bitfield stores into memory.
1802
1803   A nonvarying address may be just a register or just a symbol reference,
1804   or it may be either of those plus a numeric offset.  */
1805
1806static void
1807invalidate (rtx x, enum machine_mode full_mode)
1808{
1809  int i;
1810  struct table_elt *p;
1811  rtx addr;
1812
1813  switch (GET_CODE (x))
1814    {
1815    case REG:
1816      {
1817	/* If X is a register, dependencies on its contents are recorded
1818	   through the qty number mechanism.  Just change the qty number of
1819	   the register, mark it as invalid for expressions that refer to it,
1820	   and remove it itself.  */
1821	unsigned int regno = REGNO (x);
1822	unsigned int hash = HASH (x, GET_MODE (x));
1823
1824	/* Remove REGNO from any quantity list it might be on and indicate
1825	   that its value might have changed.  If it is a pseudo, remove its
1826	   entry from the hash table.
1827
1828	   For a hard register, we do the first two actions above for any
1829	   additional hard registers corresponding to X.  Then, if any of these
1830	   registers are in the table, we must remove any REG entries that
1831	   overlap these registers.  */
1832
1833	delete_reg_equiv (regno);
1834	REG_TICK (regno)++;
1835	SUBREG_TICKED (regno) = -1;
1836
1837	if (regno >= FIRST_PSEUDO_REGISTER)
1838	  {
1839	    /* Because a register can be referenced in more than one mode,
1840	       we might have to remove more than one table entry.  */
1841	    struct table_elt *elt;
1842
1843	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1844	      remove_from_table (elt, hash);
1845	  }
1846	else
1847	  {
1848	    HOST_WIDE_INT in_table
1849	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1850	    unsigned int endregno
1851	      = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1852	    unsigned int tregno, tendregno, rn;
1853	    struct table_elt *p, *next;
1854
1855	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1856
1857	    for (rn = regno + 1; rn < endregno; rn++)
1858	      {
1859		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1860		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1861		delete_reg_equiv (rn);
1862		REG_TICK (rn)++;
1863		SUBREG_TICKED (rn) = -1;
1864	      }
1865
1866	    if (in_table)
1867	      for (hash = 0; hash < HASH_SIZE; hash++)
1868		for (p = table[hash]; p; p = next)
1869		  {
1870		    next = p->next_same_hash;
1871
1872		    if (GET_CODE (p->exp) != REG
1873			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1874		      continue;
1875
1876		    tregno = REGNO (p->exp);
1877		    tendregno
1878		      = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1879		    if (tendregno > regno && tregno < endregno)
1880		      remove_from_table (p, hash);
1881		  }
1882	  }
1883      }
1884      return;
1885
1886    case SUBREG:
1887      invalidate (SUBREG_REG (x), VOIDmode);
1888      return;
1889
1890    case PARALLEL:
1891      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1892	invalidate (XVECEXP (x, 0, i), VOIDmode);
1893      return;
1894
1895    case EXPR_LIST:
1896      /* This is part of a disjoint return value; extract the location in
1897	 question ignoring the offset.  */
1898      invalidate (XEXP (x, 0), VOIDmode);
1899      return;
1900
1901    case MEM:
1902      addr = canon_rtx (get_addr (XEXP (x, 0)));
1903      /* Calculate the canonical version of X here so that
1904	 true_dependence doesn't generate new RTL for X on each call.  */
1905      x = canon_rtx (x);
1906
1907      /* Remove all hash table elements that refer to overlapping pieces of
1908	 memory.  */
1909      if (full_mode == VOIDmode)
1910	full_mode = GET_MODE (x);
1911
1912      for (i = 0; i < HASH_SIZE; i++)
1913	{
1914	  struct table_elt *next;
1915
1916	  for (p = table[i]; p; p = next)
1917	    {
1918	      next = p->next_same_hash;
1919	      if (p->in_memory)
1920		{
1921		  struct check_dependence_data d;
1922
1923		  /* Just canonicalize the expression once;
1924		     otherwise each time we call invalidate
1925		     true_dependence will canonicalize the
1926		     expression again.  */
1927		  if (!p->canon_exp)
1928		    p->canon_exp = canon_rtx (p->exp);
1929		  d.exp = x;
1930		  d.addr = addr;
1931		  d.mode = full_mode;
1932		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1933		    remove_from_table (p, i);
1934		}
1935	    }
1936	}
1937      return;
1938
1939    default:
1940      abort ();
1941    }
1942}
1943
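/* As an example of the hard register case above: invalidating a DImode
   value in hard register 0 on a hypothetical 32-bit target where
   HARD_REGNO_NREGS gives 2 bumps REG_TICK for registers 0 and 1, and a
   surviving table entry for (reg:SI 1) satisfies tendregno > regno
   && tregno < endregno (2 > 0 && 1 < 2), so it is removed as well.  */
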
1944/* Remove all expressions that refer to register REGNO,
1945   since they are already invalid, and we are about to
1946   mark that register valid again and don't want the old
1947   expressions to reappear as valid.  */
1948
1949static void
1950remove_invalid_refs (unsigned int regno)
1951{
1952  unsigned int i;
1953  struct table_elt *p, *next;
1954
1955  for (i = 0; i < HASH_SIZE; i++)
1956    for (p = table[i]; p; p = next)
1957      {
1958	next = p->next_same_hash;
1959	if (GET_CODE (p->exp) != REG
1960	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1961	  remove_from_table (p, i);
1962      }
1963}
1964
1965/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1966   and mode MODE.  */
1967static void
1968remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1969			    enum machine_mode mode)
1970{
1971  unsigned int i;
1972  struct table_elt *p, *next;
1973  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1974
1975  for (i = 0; i < HASH_SIZE; i++)
1976    for (p = table[i]; p; p = next)
1977      {
1978	rtx exp = p->exp;
1979	next = p->next_same_hash;
1980
1981	if (GET_CODE (exp) != REG
1982	    && (GET_CODE (exp) != SUBREG
1983		|| GET_CODE (SUBREG_REG (exp)) != REG
1984		|| REGNO (SUBREG_REG (exp)) != regno
1985		|| (((SUBREG_BYTE (exp)
1986		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1987		    && SUBREG_BYTE (exp) <= end))
1988	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1989	  remove_from_table (p, i);
1990      }
1991}
1992
1993/* Recompute the hash codes of any valid entries in the hash table that
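/* A minimal sketch of the byte-overlap test used above (the function
   name is hypothetical): with OFFSET 4 and a 4-byte MODE, END is 7, so
   a 2-byte subreg at byte 6 (spanning bytes 6-7) is removed while one
   at byte 8 survives.  */

#if 0
static int
subreg_overlaps_p (unsigned int byte, unsigned int size,
		   unsigned int offset, unsigned int del_size)
{
  unsigned int end = offset + (del_size - 1);

  return byte + (size - 1) >= offset && byte <= end;
}
#endif
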
1994   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1995
1996   This is called when we make a jump equivalence.  */
1997
1998static void
1999rehash_using_reg (rtx x)
2000{
2001  unsigned int i;
2002  struct table_elt *p, *next;
2003  unsigned hash;
2004
2005  if (GET_CODE (x) == SUBREG)
2006    x = SUBREG_REG (x);
2007
2008  /* If X is not a register or if the register is known not to be in any
2009     valid entries in the table, we have no work to do.  */
2010
2011  if (GET_CODE (x) != REG
2012      || REG_IN_TABLE (REGNO (x)) < 0
2013      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2014    return;
2015
2016  /* Scan all hash chains looking for valid entries that mention X.
2017     If we find one and it is in the wrong hash chain, move it.  */
2018
2019  for (i = 0; i < HASH_SIZE; i++)
2020    for (p = table[i]; p; p = next)
2021      {
2022	next = p->next_same_hash;
2023	if (reg_mentioned_p (x, p->exp)
2024	    && exp_equiv_p (p->exp, p->exp, 1, 0)
2025	    && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2026	  {
2027	    if (p->next_same_hash)
2028	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2029
2030	    if (p->prev_same_hash)
2031	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2032	    else
2033	      table[i] = p->next_same_hash;
2034
2035	    p->next_same_hash = table[hash];
2036	    p->prev_same_hash = 0;
2037	    if (table[hash])
2038	      table[hash]->prev_same_hash = p;
2039	    table[hash] = p;
2040	  }
2041      }
2042}
2043
2044/* Remove from the hash table any expression that is a call-clobbered
2045   register.  Also update their TICK values.  */
2046
2047static void
2048invalidate_for_call (void)
2049{
2050  unsigned int regno, endregno;
2051  unsigned int i;
2052  unsigned hash;
2053  struct table_elt *p, *next;
2054  int in_table = 0;
2055
2056  /* Go through all the hard registers.  For each that is clobbered in
2057     a CALL_INSN, remove the register from quantity chains and update
2058     reg_tick if defined.  Also see if any of these registers is currently
2059     in the table.  */
2060
2061  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2062    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2063      {
2064	delete_reg_equiv (regno);
2065	if (REG_TICK (regno) >= 0)
2066	  {
2067	    REG_TICK (regno)++;
2068	    SUBREG_TICKED (regno) = -1;
2069	  }
2070
2071	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2072      }
2073
2074  /* In the case where we have no call-clobbered hard registers in the
2075     table, we are done.  Otherwise, scan the table and remove any
2076     entry that overlaps a call-clobbered register.  */
2077
2078  if (in_table)
2079    for (hash = 0; hash < HASH_SIZE; hash++)
2080      for (p = table[hash]; p; p = next)
2081	{
2082	  next = p->next_same_hash;
2083
2084	  if (GET_CODE (p->exp) != REG
2085	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2086	    continue;
2087
2088	  regno = REGNO (p->exp);
2089	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2090
2091	  for (i = regno; i < endregno; i++)
2092	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2093	      {
2094		remove_from_table (p, hash);
2095		break;
2096	      }
2097	}
2098}
2099
2100/* Given an expression X of type CONST,
2101   and ELT which is its table entry (or 0 if it
2102   is not in the hash table),
2103   return an alternate expression for X as a register plus integer.
2104   If none can be found, return 0.  */
2105
2106static rtx
2107use_related_value (rtx x, struct table_elt *elt)
2108{
2109  struct table_elt *relt = 0;
2110  struct table_elt *p, *q;
2111  HOST_WIDE_INT offset;
2112
2113  /* First, is there anything related known?
2114     If we have a table element, we can tell from that.
2115     Otherwise, must look it up.  */
2116
2117  if (elt != 0 && elt->related_value != 0)
2118    relt = elt;
2119  else if (elt == 0 && GET_CODE (x) == CONST)
2120    {
2121      rtx subexp = get_related_value (x);
2122      if (subexp != 0)
2123	relt = lookup (subexp,
2124		       safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2125		       GET_MODE (subexp));
2126    }
2127
2128  if (relt == 0)
2129    return 0;
2130
2131  /* Search all related table entries for one that has an
2132     equivalent register.  */
2133
2134  p = relt;
2135  while (1)
2136    {
2137      /* This loop is strange in that it is executed in two different cases.
2138	 The first is when X is already in the table.  Then it is searching
2139	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2140	 X is not in the table.  Then RELT points to a class for the related
2141	 value.
2142
2143	 Ensure that, whatever case we are in, we ignore classes that have
2144	 the same value as X.  */
2145
2146      if (rtx_equal_p (x, p->exp))
2147	q = 0;
2148      else
2149	for (q = p->first_same_value; q; q = q->next_same_value)
2150	  if (GET_CODE (q->exp) == REG)
2151	    break;
2152
2153      if (q)
2154	break;
2155
2156      p = p->related_value;
2157
2158      /* We went all the way around, so there is nothing to be found.
2159	 Alternatively, perhaps RELT was in the table for some other reason
2160	 and it has no related values recorded.  */
2161      if (p == relt || p == 0)
2162	break;
2163    }
2164
2165  if (q == 0)
2166    return 0;
2167
2168  offset = (get_integer_term (x) - get_integer_term (p->exp));
2169  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2170  return plus_constant (q->exp, offset);
2171}
2172
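/* For example, suppose the table already knows

	(reg 77) == (const (plus (symbol_ref "s") (const_int 4)))

   and X is (const (plus (symbol_ref "s") (const_int 12))).  Both CONSTs
   were linked into the related_value ring of (symbol_ref "s") by
   insert, so the walk above reaches the class of the s+4 constant,
   finds (reg 77) there, computes OFFSET = 12 - 4 = 8, and returns
   (plus (reg 77) (const_int 8)).  (Register numbers are made up.)  */
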
2173/* Hash a string.  Just add its bytes up.  */
2174static inline unsigned
2175canon_hash_string (const char *ps)
2176{
2177  unsigned hash = 0;
2178  const unsigned char *p = (const unsigned char *) ps;
2179
2180  if (p)
2181    while (*p)
2182      hash += *p++;
2183
2184  return hash;
2185}
2186
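/* A standalone equivalent of the hash above, with a hypothetical name
   and driver; on an ASCII host, "ab" hashes to 97 + 98 == 195 and a
   null pointer hashes to 0:  */

#if 0
#include <stdio.h>

static unsigned
byte_sum_hash (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

int
main (void)
{
  printf ("%u\n", byte_sum_hash ("ab"));	/* prints 195 */
  return 0;
}
#endif
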
2187/* Hash an rtx.  We are careful to make sure the value is never negative.
2188   Equivalent registers hash identically.
2189   MODE is used in hashing for CONST_INTs only;
2190   otherwise the mode of X is used.
2191
2192   Store 1 in do_not_record if any subexpression is volatile.
2193
2194   Store 1 in hash_arg_in_memory if X contains a MEM rtx
2195   which does not have the RTX_UNCHANGING_P bit set.
2196
2197   Note that cse_insn knows that the hash code of a MEM expression
2198   is just (int) MEM plus the hash code of the address.  */
2199
2200static unsigned
2201canon_hash (rtx x, enum machine_mode mode)
2202{
2203  int i, j;
2204  unsigned hash = 0;
2205  enum rtx_code code;
2206  const char *fmt;
2207
2208  /* repeat is used to turn tail-recursion into iteration.  */
2209 repeat:
2210  if (x == 0)
2211    return hash;
2212
2213  code = GET_CODE (x);
2214  switch (code)
2215    {
2216    case REG:
2217      {
2218	unsigned int regno = REGNO (x);
2219	bool record;
2220
2221	/* On some machines, we can't record any non-fixed hard register,
2222	   because extending its life will cause reload problems.  We
2223	   consider ap, fp, sp, gp to be fixed for this purpose.
2224
2225	   We also consider CCmode registers to be fixed for this purpose;
2226	   failure to do so leads to failure to simplify 0<100 type of
2227	   conditionals.
2228
2229	   On all machines, we can't record any global registers.
2230	   Nor should we record any register that is in a small
2231	   class, as defined by CLASS_LIKELY_SPILLED_P.  */
2232
2233	if (regno >= FIRST_PSEUDO_REGISTER)
2234	  record = true;
2235	else if (x == frame_pointer_rtx
2236		 || x == hard_frame_pointer_rtx
2237		 || x == arg_pointer_rtx
2238		 || x == stack_pointer_rtx
2239		 || x == pic_offset_table_rtx)
2240	  record = true;
2241	else if (global_regs[regno])
2242	  record = false;
2243	else if (fixed_regs[regno])
2244	  record = true;
2245	else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2246	  record = true;
2247	else if (SMALL_REGISTER_CLASSES)
2248	  record = false;
2249	else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2250	  record = false;
2251	else
2252	  record = true;
2253
2254	if (!record)
2255	  {
2256	    do_not_record = 1;
2257	    return 0;
2258	  }
2259
2260	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2261	return hash;
2262      }
2263
2264    /* We handle SUBREG of a REG specially because the underlying
2265       reg changes its hash value with every value change; we don't
2266       want to have to forget unrelated subregs when one subreg changes.  */
2267    case SUBREG:
2268      {
2269	if (GET_CODE (SUBREG_REG (x)) == REG)
2270	  {
2271	    hash += (((unsigned) SUBREG << 7)
2272		     + REGNO (SUBREG_REG (x))
2273		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2274	    return hash;
2275	  }
2276	break;
2277      }
2278
2279    case CONST_INT:
2280      {
2281	unsigned HOST_WIDE_INT tem = INTVAL (x);
2282	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2283	return hash;
2284      }
2285
2286    case CONST_DOUBLE:
2287      /* This is like the general case, except that it only counts
2288	 the integers representing the constant.  */
2289      hash += (unsigned) code + (unsigned) GET_MODE (x);
2290      if (GET_MODE (x) != VOIDmode)
2291	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2292      else
2293	hash += ((unsigned) CONST_DOUBLE_LOW (x)
2294		 + (unsigned) CONST_DOUBLE_HIGH (x));
2295      return hash;
2296
2297    case CONST_VECTOR:
2298      {
2299	int units;
2300	rtx elt;
2301
2302	units = CONST_VECTOR_NUNITS (x);
2303
2304	for (i = 0; i < units; ++i)
2305	  {
2306	    elt = CONST_VECTOR_ELT (x, i);
2307	    hash += canon_hash (elt, GET_MODE (elt));
2308	  }
2309
2310	return hash;
2311      }
2312
2313      /* Assume there is only one rtx object for any given label.  */
2314    case LABEL_REF:
2315      hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2316      return hash;
2317
2318    case SYMBOL_REF:
2319      hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2320      return hash;
2321
2322    case MEM:
2323      /* We don't record if marked volatile or if BLKmode since we don't
2324	 know the size of the move.  */
2325      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2326	{
2327	  do_not_record = 1;
2328	  return 0;
2329	}
2330      if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2331	hash_arg_in_memory = 1;
2332
2333      /* Now that we have already found this special case,
2334	 might as well speed it up as much as possible.  */
2335      hash += (unsigned) MEM;
2336      x = XEXP (x, 0);
2337      goto repeat;
2338
2339    case USE:
2340      /* A USE that mentions non-volatile memory needs special
2341	 handling since the MEM may be BLKmode which normally
2342	 prevents an entry from being made.  Pure calls are
2343	 marked by a USE which mentions BLKmode memory.  */
2344      if (GET_CODE (XEXP (x, 0)) == MEM
2345	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2346	{
2347	  hash += (unsigned) USE;
2348	  x = XEXP (x, 0);
2349
2350	  if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2351	    hash_arg_in_memory = 1;
2352
2353	  /* Now that we have already found this special case,
2354	     might as well speed it up as much as possible.  */
2355	  hash += (unsigned) MEM;
2356	  x = XEXP (x, 0);
2357	  goto repeat;
2358	}
2359      break;
2360
2361    case PRE_DEC:
2362    case PRE_INC:
2363    case POST_DEC:
2364    case POST_INC:
2365    case PRE_MODIFY:
2366    case POST_MODIFY:
2367    case PC:
2368    case CC0:
2369    case CALL:
2370    case UNSPEC_VOLATILE:
2371      do_not_record = 1;
2372      return 0;
2373
2374    case ASM_OPERANDS:
2375      if (MEM_VOLATILE_P (x))
2376	{
2377	  do_not_record = 1;
2378	  return 0;
2379	}
2380      else
2381	{
2382	  /* We don't want to take the filename and line into account.  */
2383	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2384	    + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2385	    + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2386	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2387
2388	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2389	    {
2390	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2391		{
2392		  hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2393				       GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2394			   + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2395						(x, i)));
2396		}
2397
2398	      hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2399	      x = ASM_OPERANDS_INPUT (x, 0);
2400	      mode = GET_MODE (x);
2401	      goto repeat;
2402	    }
2403
2404	  return hash;
2405	}
2406      break;
2407
2408    default:
2409      break;
2410    }
2411
2412  i = GET_RTX_LENGTH (code) - 1;
2413  hash += (unsigned) code + (unsigned) GET_MODE (x);
2414  fmt = GET_RTX_FORMAT (code);
2415  for (; i >= 0; i--)
2416    {
2417      if (fmt[i] == 'e')
2418	{
2419	  rtx tem = XEXP (x, i);
2420
2421	  /* If we are about to do the last recursive call
2422	     needed at this level, change it into iteration.
2423	     This function is called often enough to be worth it.  */
2424	  if (i == 0)
2425	    {
2426	      x = tem;
2427	      goto repeat;
2428	    }
2429	  hash += canon_hash (tem, 0);
2430	}
2431      else if (fmt[i] == 'E')
2432	for (j = 0; j < XVECLEN (x, i); j++)
2433	  hash += canon_hash (XVECEXP (x, i, j), 0);
2434      else if (fmt[i] == 's')
2435	hash += canon_hash_string (XSTR (x, i));
2436      else if (fmt[i] == 'i')
2437	{
2438	  unsigned tem = XINT (x, i);
2439	  hash += tem;
2440	}
2441      else if (fmt[i] == '0' || fmt[i] == 't')
2442	/* Unused.  */
2443	;
2444      else
2445	abort ();
2446    }
2447  return hash;
2448}
2449
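/* Note how the REG case above hashes REG_QTY (regno) rather than the
   register number itself: after a copy, say, (reg 60) and (reg 66)
   share a quantity and therefore contribute the identical term
   ((unsigned) REG << 7) + qty, landing in the same hash bucket --
   which is exactly what "equivalent registers hash identically"
   requires.  A CONST_INT, by contrast, mixes in the caller-supplied
   MODE, so the same integer used in different modes can hash
   differently.  */
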
2450/* Like canon_hash but with no side effects.  */
2451
2452static unsigned
2453safe_hash (rtx x, enum machine_mode mode)
2454{
2455  int save_do_not_record = do_not_record;
2456  int save_hash_arg_in_memory = hash_arg_in_memory;
2457  unsigned hash = canon_hash (x, mode);
2458  hash_arg_in_memory = save_hash_arg_in_memory;
2459  do_not_record = save_do_not_record;
2460  return hash;
2461}
2462
2463/* Return 1 iff X and Y would canonicalize into the same thing,
2464   without actually constructing the canonicalization of either one.
2465   If VALIDATE is nonzero,
2466   we assume X is an expression being processed from the rtl
2467   and Y was found in the hash table.  We check register refs
2468   in Y for being marked as valid.
2469
2470   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2471   that is known to be in the register.  Ordinarily, we don't allow them
2472   to match, because letting them match would cause unpredictable results
2473   in all the places that search a hash table chain for an equivalent
2474   for a given value.  A possible equivalent that has different structure
2475   has its hash code computed from different data.  Whether the hash code
2476   is the same as that of the given value is pure luck.  */
2477
2478static int
2479exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
2480{
2481  int i, j;
2482  enum rtx_code code;
2483  const char *fmt;
2484
2485  /* Note: it is incorrect to assume an expression is equivalent to itself
2486     if VALIDATE is nonzero.  */
2487  if (x == y && !validate)
2488    return 1;
2489  if (x == 0 || y == 0)
2490    return x == y;
2491
2492  code = GET_CODE (x);
2493  if (code != GET_CODE (y))
2494    {
2495      if (!equal_values)
2496	return 0;
2497
2498      /* If X is a constant and Y is a register or vice versa, they may be
2499	 equivalent.  We only have to validate if Y is a register.  */
2500      if (CONSTANT_P (x) && GET_CODE (y) == REG
2501	  && REGNO_QTY_VALID_P (REGNO (y)))
2502	{
2503	  int y_q = REG_QTY (REGNO (y));
2504	  struct qty_table_elem *y_ent = &qty_table[y_q];
2505
2506	  if (GET_MODE (y) == y_ent->mode
2507	      && rtx_equal_p (x, y_ent->const_rtx)
2508	      && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2509	    return 1;
2510	}
2511
2512      if (CONSTANT_P (y) && code == REG
2513	  && REGNO_QTY_VALID_P (REGNO (x)))
2514	{
2515	  int x_q = REG_QTY (REGNO (x));
2516	  struct qty_table_elem *x_ent = &qty_table[x_q];
2517
2518	  if (GET_MODE (x) == x_ent->mode
2519	      && rtx_equal_p (y, x_ent->const_rtx))
2520	    return 1;
2521	}
2522
2523      return 0;
2524    }
2525
2526  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2527  if (GET_MODE (x) != GET_MODE (y))
2528    return 0;
2529
2530  switch (code)
2531    {
2532    case PC:
2533    case CC0:
2534    case CONST_INT:
2535      return x == y;
2536
2537    case LABEL_REF:
2538      return XEXP (x, 0) == XEXP (y, 0);
2539
2540    case SYMBOL_REF:
2541      return XSTR (x, 0) == XSTR (y, 0);
2542
2543    case REG:
2544      {
2545	unsigned int regno = REGNO (y);
2546	unsigned int endregno
2547	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2548		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2549	unsigned int i;
2550
2551	/* If the quantities are not the same, the expressions are not
2552	   equivalent.  If they are and we are not to validate, they
2553	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2554
2555	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2556	  return 0;
2557
2558	if (! validate)
2559	  return 1;
2560
2561	for (i = regno; i < endregno; i++)
2562	  if (REG_IN_TABLE (i) != REG_TICK (i))
2563	    return 0;
2564
2565	return 1;
2566      }
2567
2568    /*  For commutative operations, check both orders.  */
2569    case PLUS:
2570    case MULT:
2571    case AND:
2572    case IOR:
2573    case XOR:
2574    case NE:
2575    case EQ:
2576      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2577	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2578			       validate, equal_values))
2579	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2580			       validate, equal_values)
2581		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2582				  validate, equal_values)));
2583
2584    case ASM_OPERANDS:
2585      /* We don't use the generic code below because we want to
2586	 disregard filename and line numbers.  */
2587
2588      /* A volatile asm isn't equivalent to any other.  */
2589      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2590	return 0;
2591
2592      if (GET_MODE (x) != GET_MODE (y)
2593	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2594	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2595		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2596	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2597	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2598	return 0;
2599
2600      if (ASM_OPERANDS_INPUT_LENGTH (x))
2601	{
2602	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2603	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2604			       ASM_OPERANDS_INPUT (y, i),
2605			       validate, equal_values)
2606		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2607			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2608	      return 0;
2609	}
2610
2611      return 1;
2612
2613    default:
2614      break;
2615    }
2616
2617  /* Compare the elements.  If any pair of corresponding elements
2618     fails to match, return 0 for the whole thing.  */
2619
2620  fmt = GET_RTX_FORMAT (code);
2621  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2622    {
2623      switch (fmt[i])
2624	{
2625	case 'e':
2626	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2627	    return 0;
2628	  break;
2629
2630	case 'E':
2631	  if (XVECLEN (x, i) != XVECLEN (y, i))
2632	    return 0;
2633	  for (j = 0; j < XVECLEN (x, i); j++)
2634	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2635			       validate, equal_values))
2636	      return 0;
2637	  break;
2638
2639	case 's':
2640	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2641	    return 0;
2642	  break;
2643
2644	case 'i':
2645	  if (XINT (x, i) != XINT (y, i))
2646	    return 0;
2647	  break;
2648
2649	case 'w':
2650	  if (XWINT (x, i) != XWINT (y, i))
2651	    return 0;
2652	  break;
2653
2654	case '0':
2655	case 't':
2656	  break;
2657
2658	default:
2659	  abort ();
2660	}
2661    }
2662
2663  return 1;
2664}
2665
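/* For example, exp_equiv_p accepts

	(plus:SI (reg 60) (reg 61))  vs.  (plus:SI (reg 61) (reg 60))

   through the crosswise arm of the commutative case above, whereas
   (minus:SI (reg 60) (reg 61)) against its swapped form falls through
   to the element-by-element walk, which compares operands strictly by
   position.  */
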
2666/* Return 1 if X has a value that can vary even between two
2667   executions of the program.  0 means X can be compared reliably
2668   against certain constants or near-constants.  */
2669
2670static int
2671cse_rtx_varies_p (rtx x, int from_alias)
2672{
2673  /* We need not check for X and the equivalence class being of the same
2674     mode because if X is equivalent to a constant in some mode, it
2675     doesn't vary in any mode.  */
2676
2677  if (GET_CODE (x) == REG
2678      && REGNO_QTY_VALID_P (REGNO (x)))
2679    {
2680      int x_q = REG_QTY (REGNO (x));
2681      struct qty_table_elem *x_ent = &qty_table[x_q];
2682
2683      if (GET_MODE (x) == x_ent->mode
2684	  && x_ent->const_rtx != NULL_RTX)
2685	return 0;
2686    }
2687
2688  if (GET_CODE (x) == PLUS
2689      && GET_CODE (XEXP (x, 1)) == CONST_INT
2690      && GET_CODE (XEXP (x, 0)) == REG
2691      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2692    {
2693      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2694      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2695
2696      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2697	  && x0_ent->const_rtx != NULL_RTX)
2698	return 0;
2699    }
2700
2701  /* This can happen as the result of virtual register instantiation, if
2702     the initial constant is too large to be a valid address.  This gives
2703     us a three instruction sequence, load large offset into a register,
2704     load fp minus a constant into a register, then a MEM which is the
2705     sum of the two `constant' registers.  */
2706  if (GET_CODE (x) == PLUS
2707      && GET_CODE (XEXP (x, 0)) == REG
2708      && GET_CODE (XEXP (x, 1)) == REG
2709      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2710      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2711    {
2712      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2713      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2714      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2715      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2716
2717      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2718	  && x0_ent->const_rtx != NULL_RTX
2719	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2720	  && x1_ent->const_rtx != NULL_RTX)
2721	return 0;
2722    }
2723
2724  return rtx_varies_p (x, from_alias);
2725}
2726
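/* For example, (plus:SI (reg 65) (const_int 8)) where (reg 65) is
   recorded in qty_table as equivalent to an SImode constant hits the
   second test above and returns 0: the address can be compared
   reliably even though rtx_varies_p would treat the bare pseudo as
   varying.  (The register number is made up for illustration.)  */
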
2727/* Canonicalize an expression:
2728   replace each register reference inside it
2729   with the "oldest" equivalent register.
2730
2731   If INSN is nonzero and we are replacing a pseudo with a hard register
2732   or vice versa, validate_change is used to ensure that INSN remains valid
2733   after we make our substitution.  The calls are made with IN_GROUP nonzero
2734   so apply_change_group must be called upon the outermost return from this
2735   function (unless INSN is zero).  The result of apply_change_group can
2736   generally be discarded since the changes we are making are optional.  */
2737
2738static rtx
2739canon_reg (rtx x, rtx insn)
2740{
2741  int i;
2742  enum rtx_code code;
2743  const char *fmt;
2744
2745  if (x == 0)
2746    return x;
2747
2748  code = GET_CODE (x);
2749  switch (code)
2750    {
2751    case PC:
2752    case CC0:
2753    case CONST:
2754    case CONST_INT:
2755    case CONST_DOUBLE:
2756    case CONST_VECTOR:
2757    case SYMBOL_REF:
2758    case LABEL_REF:
2759    case ADDR_VEC:
2760    case ADDR_DIFF_VEC:
2761      return x;
2762
2763    case REG:
2764      {
2765	int first;
2766	int q;
2767	struct qty_table_elem *ent;
2768
2769	/* Never replace a hard reg, because hard regs can appear
2770	   in more than one machine mode, and we must preserve the mode
2771	   of each occurrence.  Also, some hard regs appear in
2772	   MEMs that are shared and mustn't be altered.  Don't try to
2773	   replace any reg that maps to a reg of class NO_REGS.  */
2774	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2775	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2776	  return x;
2777
2778	q = REG_QTY (REGNO (x));
2779	ent = &qty_table[q];
2780	first = ent->first_reg;
2781	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2782		: REGNO_REG_CLASS (first) == NO_REGS ? x
2783		: gen_rtx_REG (ent->mode, first));
2784      }
2785
2786    default:
2787      break;
2788    }
2789
2790  fmt = GET_RTX_FORMAT (code);
2791  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2792    {
2793      int j;
2794
2795      if (fmt[i] == 'e')
2796	{
2797	  rtx new = canon_reg (XEXP (x, i), insn);
2798	  int insn_code;
2799
2800	  /* If replacing pseudo with hard reg or vice versa, ensure the
2801	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2802	  if (insn != 0 && new != 0
2803	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2804	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2805		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2806		  || (insn_code = recog_memoized (insn)) < 0
2807		  || insn_data[insn_code].n_dups > 0))
2808	    validate_change (insn, &XEXP (x, i), new, 1);
2809	  else
2810	    XEXP (x, i) = new;
2811	}
2812      else if (fmt[i] == 'E')
2813	for (j = 0; j < XVECLEN (x, i); j++)
2814	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2815    }
2816
2817  return x;
2818}
2819
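/* For example, after an insn like (set (reg 66) (reg 60)) the two
   pseudos share a quantity whose first_reg is 60, so canon_reg rewrites
   a later occurrence of (reg 66) into regno_reg_rtx[60].  Hard
   registers are deliberately left alone, and a pseudo/hard-reg
   replacement is funneled through validate_change with IN_GROUP
   nonzero so the caller's apply_change_group can reject it.  */
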
2820/* LOC is a location within INSN that is an operand address (the contents of
2821   a MEM).  Find the best equivalent address to use that is valid for this
2822   insn.
2823
2824   On most CISC machines, complicated address modes are costly, and rtx_cost
2825   is a good approximation for that cost.  However, most RISC machines have
2826   only a few (usually only one) memory reference formats.  If an address is
2827   valid at all, it is often just as cheap as any other address.  Hence, for
2828   RISC machines, we use `address_cost' to compare the costs of various
2829   addresses.  For two addresses of equal cost, choose the one with the
2830   highest `rtx_cost' value as that has the potential of eliminating the
2831   most insns.  For equal costs, we choose the first in the equivalence
2832   class.  Note that we ignore the fact that pseudo registers are cheaper than
2833   hard registers here because we would also prefer the pseudo registers.  */
2834
2835static void
2836find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2837{
2838  struct table_elt *elt;
2839  rtx addr = *loc;
2840  struct table_elt *p;
2841  int found_better = 1;
2842  int save_do_not_record = do_not_record;
2843  int save_hash_arg_in_memory = hash_arg_in_memory;
2844  int addr_volatile;
2845  int regno;
2846  unsigned hash;
2847
2848  /* Do not try to replace constant addresses or addresses of local and
2849     argument slots.  These MEM expressions are made only once and inserted
2850     in many instructions, as well as being used to control symbol table
2851     output.  It is not safe to clobber them.
2852
2853     There are some uncommon cases where the address is already in a register
2854     for some reason, but we cannot take advantage of that because we have
2855     no easy way to unshare the MEM.  In addition, looking up all stack
2856     addresses is costly.  */
2857  if ((GET_CODE (addr) == PLUS
2858       && GET_CODE (XEXP (addr, 0)) == REG
2859       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2860       && (regno = REGNO (XEXP (addr, 0)),
2861	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2862	   || regno == ARG_POINTER_REGNUM))
2863      || (GET_CODE (addr) == REG
2864	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2865	      || regno == HARD_FRAME_POINTER_REGNUM
2866	      || regno == ARG_POINTER_REGNUM))
2867      || GET_CODE (addr) == ADDRESSOF
2868      || CONSTANT_ADDRESS_P (addr))
2869    return;
2870
2871  /* If this address is not simply a register, try to fold it.  This will
2872     sometimes simplify the expression.  Many simplifications
2873     will not be valid, but some, usually applying the associative rule, will
2874     be valid and produce better code.  */
2875  if (GET_CODE (addr) != REG)
2876    {
2877      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2878      int addr_folded_cost = address_cost (folded, mode);
2879      int addr_cost = address_cost (addr, mode);
2880
2881      if ((addr_folded_cost < addr_cost
2882	   || (addr_folded_cost == addr_cost
2883	       /* ??? The rtx_cost comparison is left over from an older
2884		  version of this code.  It is probably no longer helpful.  */
2885	       && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2886		   || approx_reg_cost (folded) < approx_reg_cost (addr))))
2887	  && validate_change (insn, loc, folded, 0))
2888	addr = folded;
2889    }
2890
2891  /* If this address is not in the hash table, we can't look for equivalences
2892     of the whole address.  Also, ignore if volatile.  */
2893
2894  do_not_record = 0;
2895  hash = HASH (addr, Pmode);
2896  addr_volatile = do_not_record;
2897  do_not_record = save_do_not_record;
2898  hash_arg_in_memory = save_hash_arg_in_memory;
2899
2900  if (addr_volatile)
2901    return;
2902
2903  elt = lookup (addr, hash, Pmode);
2904
2905  if (elt)
2906    {
2907      /* We need to find the best (under the criteria documented above) entry
2908	 in the class that is valid.  We use the `flag' field to indicate
2909	 choices that were invalid and iterate until we can't find a better
2910	 one that hasn't already been tried.  */
2911
2912      for (p = elt->first_same_value; p; p = p->next_same_value)
2913	p->flag = 0;
2914
2915      while (found_better)
2916	{
2917	  int best_addr_cost = address_cost (*loc, mode);
2918	  int best_rtx_cost = (elt->cost + 1) >> 1;
2919	  int exp_cost;
2920	  struct table_elt *best_elt = elt;
2921
2922	  found_better = 0;
2923	  for (p = elt->first_same_value; p; p = p->next_same_value)
2924	    if (! p->flag)
2925	      {
2926		if ((GET_CODE (p->exp) == REG
2927		     || exp_equiv_p (p->exp, p->exp, 1, 0))
2928		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2929			|| (exp_cost == best_addr_cost
2930			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
2931		  {
2932		    found_better = 1;
2933		    best_addr_cost = exp_cost;
2934		    best_rtx_cost = (p->cost + 1) >> 1;
2935		    best_elt = p;
2936		  }
2937	      }
2938
2939	  if (found_better)
2940	    {
2941	      if (validate_change (insn, loc,
2942				   canon_reg (copy_rtx (best_elt->exp),
2943					      NULL_RTX), 0))
2944		return;
2945	      else
2946		best_elt->flag = 1;
2947	    }
2948	}
2949    }
2950
2951  /* If the address is a binary operation with the first operand a register
2952     and the second a constant, do the same as above, but looking for
2953     equivalences of the register.  Then try to simplify before checking for
2954     the best address to use.  This catches a few cases:  First is when we
2955     have REG+const and the register is another REG+const.  We can often merge
2956     the constants and eliminate one insn and one register.  It may also be
2957     that a machine has a cheap REG+REG+const.  Finally, this improves the
2958     code on the Alpha for unaligned byte stores.  */
2959
2960  if (flag_expensive_optimizations
2961      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2962	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2963      && GET_CODE (XEXP (*loc, 0)) == REG)
2964    {
2965      rtx op1 = XEXP (*loc, 1);
2966
2967      do_not_record = 0;
2968      hash = HASH (XEXP (*loc, 0), Pmode);
2969      do_not_record = save_do_not_record;
2970      hash_arg_in_memory = save_hash_arg_in_memory;
2971
2972      elt = lookup (XEXP (*loc, 0), hash, Pmode);
2973      if (elt == 0)
2974	return;
2975
2976      /* We need to find the best (under the criteria documented above) entry
2977	 in the class that is valid.  We use the `flag' field to indicate
2978	 choices that were invalid and iterate until we can't find a better
2979	 one that hasn't already been tried.  */
2980
2981      for (p = elt->first_same_value; p; p = p->next_same_value)
2982	p->flag = 0;
2983
2984      while (found_better)
2985	{
2986	  int best_addr_cost = address_cost (*loc, mode);
2987	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
2988	  struct table_elt *best_elt = elt;
2989	  rtx best_rtx = *loc;
2990	  int count;
2991
2992	  /* This is at worst an O(n^2) algorithm, so limit our search
2993	     to the first 32 elements on the list.  This avoids trouble
2994	     compiling code with very long basic blocks that can easily
2995	     call simplify_gen_binary so many times that we run out of
2996	     memory.  */
2997
2998	  found_better = 0;
2999	  for (p = elt->first_same_value, count = 0;
3000	       p && count < 32;
3001	       p = p->next_same_value, count++)
3002	    if (! p->flag
3003		&& (GET_CODE (p->exp) == REG
3004		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3005	      {
3006		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3007					       p->exp, op1);
3008		int new_cost;
3009		new_cost = address_cost (new, mode);
3010
3011		if (new_cost < best_addr_cost
3012		    || (new_cost == best_addr_cost
3013			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3014		  {
3015		    found_better = 1;
3016		    best_addr_cost = new_cost;
3017		    best_rtx_cost = (COST (new) + 1) >> 1;
3018		    best_elt = p;
3019		    best_rtx = new;
3020		  }
3021	      }
3022
3023	  if (found_better)
3024	    {
3025	      if (validate_change (insn, loc,
3026				   canon_reg (copy_rtx (best_rtx),
3027					      NULL_RTX), 0))
3028		return;
3029	      else
3030		best_elt->flag = 1;
3031	    }
3032	}
3033    }
3034}
3035
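/* A sketch of the REG+const case above, with made-up register numbers:
   if *LOC is (plus (reg 70) (const_int 4)) and the class of (reg 70)
   contains (plus (reg 65) (const_int 8)), simplify_gen_binary folds
   the constants into (plus (reg 65) (const_int 12)); if address_cost
   rates the new address cheaper (or as cheap but preferred by the
   rtx-cost tie-break), validate_change installs it and the insn
   computing (reg 70) may become dead.  */
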
3036/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3037   operation (EQ, NE, GT, etc.), follow it back through the hash table to
3038   find what values are actually being compared.
3039
3040   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3041   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3042   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3043   compared to produce cc0.
3044
3045   The return value is the comparison operator: either CODE itself or
3046   the code corresponding to the inverse of the comparison.  */
3047
3048static enum rtx_code
3049find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3050		      enum machine_mode *pmode1, enum machine_mode *pmode2)
3051{
3052  rtx arg1, arg2;
3053
3054  arg1 = *parg1, arg2 = *parg2;
3055
3056  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3057
3058  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3059    {
3060      /* Set nonzero when we find something of interest.  */
3061      rtx x = 0;
3062      int reverse_code = 0;
3063      struct table_elt *p = 0;
3064
3065      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3066	 On machines with CC0, this is the only case that can occur, since
3067	 fold_rtx will return the COMPARE or item being compared with zero
3068	 when given CC0.  */
3069
3070      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3071	x = arg1;
3072
3073      /* If ARG1 is a comparison operator and CODE is testing for
3074	 STORE_FLAG_VALUE, get the inner arguments.  */
3075
3076      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3077	{
3078#ifdef FLOAT_STORE_FLAG_VALUE
3079	  REAL_VALUE_TYPE fsfv;
3080#endif
3081
3082	  if (code == NE
3083	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3084		  && code == LT && STORE_FLAG_VALUE == -1)
3085#ifdef FLOAT_STORE_FLAG_VALUE
3086	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3087		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3088		      REAL_VALUE_NEGATIVE (fsfv)))
3089#endif
3090	      )
3091	    x = arg1;
3092	  else if (code == EQ
3093		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3094		       && code == GE && STORE_FLAG_VALUE == -1)
3095#ifdef FLOAT_STORE_FLAG_VALUE
3096		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3097		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3098			   REAL_VALUE_NEGATIVE (fsfv)))
3099#endif
3100		   )
3101	    x = arg1, reverse_code = 1;
3102	}
3103
3104      /* ??? We could also check for
3105
3106	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3107
3108	 and related forms, but let's wait until we see them occurring.  */
3109
3110      if (x == 0)
3111	/* Look up ARG1 in the hash table and see if it has an equivalence
3112	   that lets us see what is being compared.  */
3113	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3114		    GET_MODE (arg1));
3115      if (p)
3116	{
3117	  p = p->first_same_value;
3118
3119	  /* If what we compare is already known to be constant, that is as
3120	     good as it gets.
3121	     We need to break the loop in this case, because otherwise we
3122	     can have an infinite loop when looking at a reg that is known
3123	     to be a constant which is the same as a comparison of a reg
3124	     against zero which appears later in the insn stream, which in
3125	     turn is constant and the same as the comparison of the first reg
3126	     against zero...  */
3127	  if (p->is_const)
3128	    break;
3129	}
3130
3131      for (; p; p = p->next_same_value)
3132	{
3133	  enum machine_mode inner_mode = GET_MODE (p->exp);
3134#ifdef FLOAT_STORE_FLAG_VALUE
3135	  REAL_VALUE_TYPE fsfv;
3136#endif
3137
3138	  /* If the entry isn't valid, skip it.  */
3139	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3140	    continue;
3141
3142	  if (GET_CODE (p->exp) == COMPARE
3143	      /* Another possibility is that this machine has a compare insn
3144		 that includes the comparison code.  In that case, ARG1 would
3145		 be equivalent to a comparison operation that would set ARG1 to
3146		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3147		 CODE is the actual comparison being done; if it is an EQ,
3148		 we must reverse CODE.  On machines with a negative value
3149		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3150	      || ((code == NE
3151		   || (code == LT
3152		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3153		       && (GET_MODE_BITSIZE (inner_mode)
3154			   <= HOST_BITS_PER_WIDE_INT)
3155		       && (STORE_FLAG_VALUE
3156			   & ((HOST_WIDE_INT) 1
3157			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3158#ifdef FLOAT_STORE_FLAG_VALUE
3159		   || (code == LT
3160		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3161		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3162			   REAL_VALUE_NEGATIVE (fsfv)))
3163#endif
3164		   )
3165		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3166	    {
3167	      x = p->exp;
3168	      break;
3169	    }
3170	  else if ((code == EQ
3171		    || (code == GE
3172			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3173			&& (GET_MODE_BITSIZE (inner_mode)
3174			    <= HOST_BITS_PER_WIDE_INT)
3175			&& (STORE_FLAG_VALUE
3176			    & ((HOST_WIDE_INT) 1
3177			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3178#ifdef FLOAT_STORE_FLAG_VALUE
3179		    || (code == GE
3180			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3181			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3182			    REAL_VALUE_NEGATIVE (fsfv)))
3183#endif
3184		    )
3185		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3186	    {
3187	      reverse_code = 1;
3188	      x = p->exp;
3189	      break;
3190	    }
3191
3192	  /* If this is a non-trapping address, e.g. fp + constant, the
3193	     equivalent is a better operand since it may let us predict
3194	     the value of the comparison.  */
3195	  else if (!rtx_addr_can_trap_p (p->exp))
3196	    {
3197	      arg1 = p->exp;
3198	      continue;
3199	    }
3200	}
3201
3202      /* If we didn't find a useful equivalence for ARG1, we are done.
3203	 Otherwise, set up for the next iteration.  */
3204      if (x == 0)
3205	break;
3206
3207      /* If we need to reverse the comparison, make sure that that is
3208	 possible -- we can't necessarily infer the value of GE from LT
3209	 with floating-point operands.  */
3210      if (reverse_code)
3211	{
3212	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3213	  if (reversed == UNKNOWN)
3214	    break;
3215	  else
3216	    code = reversed;
3217	}
3218      else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3219	code = GET_CODE (x);
3220      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3221    }
3222
3223  /* Return our results.  Return the modes from before fold_rtx
3224     because fold_rtx might produce const_int, and then it's too late.  */
3225  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3226  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3227
3228  return code;
3229}
3230
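/* For example, starting from (ne (reg 80) (const_int 0)) where the
   table knows (reg 80) == (compare (reg 60) (reg 61)), the loop above
   rewrites the arguments to (reg 60) and (reg 61) with CODE still NE.
   Had CODE been EQ and (reg 80) been equivalent to (eq (reg 60)
   (reg 61)) instead, REVERSE_CODE would flip the result to NE via
   reversed_comparison_code.  (Register numbers are illustrative.)  */
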
3231/* If X is a nontrivial arithmetic operation on an argument
3232   for which a constant value can be determined, return
3233   the result of operating on that value, as a constant.
3234   Otherwise, return X, possibly with one or more operands
3235   modified by recursive calls to this function.
3236
3237   If X is a register whose contents are known, we do NOT
3238   return those contents here.  equiv_constant is called to
3239   perform that task.
3240
3241   INSN is the insn that we may be modifying.  If it is 0, make a copy
3242   of X before modifying it.  */
3243
3244static rtx
3245fold_rtx (rtx x, rtx insn)
3246{
3247  enum rtx_code code;
3248  enum machine_mode mode;
3249  const char *fmt;
3250  int i;
3251  rtx new = 0;
3252  int copied = 0;
3253  int must_swap = 0;
3254
3255  /* Folded equivalents of first two operands of X.  */
3256  rtx folded_arg0;
3257  rtx folded_arg1;
3258
3259  /* Constant equivalents of first three operands of X;
3260     0 when no such equivalent is known.  */
3261  rtx const_arg0;
3262  rtx const_arg1;
3263  rtx const_arg2;
3264
3265  /* The mode of the first operand of X.  We need this for sign and zero
3266     extends.  */
3267  enum machine_mode mode_arg0;
3268
3269  if (x == 0)
3270    return x;
3271
3272  mode = GET_MODE (x);
3273  code = GET_CODE (x);
3274  switch (code)
3275    {
3276    case CONST:
3277    case CONST_INT:
3278    case CONST_DOUBLE:
3279    case CONST_VECTOR:
3280    case SYMBOL_REF:
3281    case LABEL_REF:
3282    case REG:
3283      /* No use simplifying an EXPR_LIST
3284	 since they are used only for lists of args
3285	 in a function call's REG_EQUAL note.  */
3286    case EXPR_LIST:
3287      /* Changing anything inside an ADDRESSOF is incorrect; we don't
3288	 want to (e.g.,) make (addressof (const_int 0)) just because
3289	 the location is known to be zero.  */
3290    case ADDRESSOF:
3291      return x;
3292
3293#ifdef HAVE_cc0
3294    case CC0:
3295      return prev_insn_cc0;
3296#endif
3297
3298    case PC:
3299      /* If the next insn is a CODE_LABEL followed by a jump table,
3300	 PC's value is a LABEL_REF pointing to that label.  That
3301	 lets us fold switch statements on the VAX.  */
3302      {
3303	rtx next;
3304	if (insn && tablejump_p (insn, &next, NULL))
3305	  return gen_rtx_LABEL_REF (Pmode, next);
3306      }
3307      break;
3308
3309    case SUBREG:
3310      /* See if we previously assigned a constant value to this SUBREG.  */
3311      if ((new = lookup_as_function (x, CONST_INT)) != 0
3312	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3313	return new;
3314
3315      /* If this is a paradoxical SUBREG, we have no idea what value the
3316	 extra bits would have.  However, if the operand is equivalent
3317	 to a SUBREG whose operand is the same as our mode, and all the
3318	 modes are within a word, we can just use the inner operand
3319	 because these SUBREGs just say how to treat the register.
3320
3321	 Similarly if we find an integer constant.  */
3322
3323      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3324	{
3325	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3326	  struct table_elt *elt;
3327
3328	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3329	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3330	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3331				imode)) != 0)
3332	    for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3333	      {
3334		if (CONSTANT_P (elt->exp)
3335		    && GET_MODE (elt->exp) == VOIDmode)
3336		  return elt->exp;
3337
3338		if (GET_CODE (elt->exp) == SUBREG
3339		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
3340		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3341		  return copy_rtx (SUBREG_REG (elt->exp));
3342	      }
3343
3344	  return x;
3345	}
3346
3347      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
3348	 We might be able to if the SUBREG is extracting a single word in an
3349	 integral mode or extracting the low part.  */
3350
3351      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3352      const_arg0 = equiv_constant (folded_arg0);
3353      if (const_arg0)
3354	folded_arg0 = const_arg0;
3355
3356      if (folded_arg0 != SUBREG_REG (x))
3357	{
3358	  new = simplify_subreg (mode, folded_arg0,
3359				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3360	  if (new)
3361	    return new;
3362	}
3363
3364      /* If this is a narrowing SUBREG and our operand is a REG, see if
3365	 we can find an equivalence for REG that is an arithmetic operation
3366	 in a wider mode where both operands are paradoxical SUBREGs
3367	 from objects of our result mode.  In that case, we couldn't report
3368	 an equivalent value for that operation, since we don't know what the
3369	 extra bits will be.  But we can find an equivalence for this SUBREG
3370	 by folding that operation in the narrow mode.  This allows us to
3371	 fold arithmetic in narrow modes when the machine only supports
3372	 word-sized arithmetic.
3373
3374	 Also look for a case where we have a SUBREG whose operand is the
3375	 same as our result.  If both modes are smaller than a word, we
3376	 are simply interpreting a register in different modes and we
3377	 can use the inner value.  */
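      /* As an illustration (hypothetical registers and values), if X is
	 (subreg:QI (reg:SI 100) 0), (reg:SI 100) is known equivalent to
	 (plus:SI (subreg:SI (reg:QI 101) 0) (const_int 1)), and (reg:QI 101)
	 is known to hold (const_int 4), we can fold X to (const_int 5) by
	 doing the addition in QImode.  */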
3378
3379      if (GET_CODE (folded_arg0) == REG
3380	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3381	  && subreg_lowpart_p (x))
3382	{
3383	  struct table_elt *elt;
3384
3385	  /* We can use HASH here since we know that canon_hash won't be
3386	     called.  */
3387	  elt = lookup (folded_arg0,
3388			HASH (folded_arg0, GET_MODE (folded_arg0)),
3389			GET_MODE (folded_arg0));
3390
3391	  if (elt)
3392	    elt = elt->first_same_value;
3393
3394	  for (; elt; elt = elt->next_same_value)
3395	    {
3396	      enum rtx_code eltcode = GET_CODE (elt->exp);
3397
3398	      /* Just check for unary and binary operations.  */
3399	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3400		  && GET_CODE (elt->exp) != SIGN_EXTEND
3401		  && GET_CODE (elt->exp) != ZERO_EXTEND
3402		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3403		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3404		  && (GET_MODE_CLASS (mode)
3405		      == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3406		{
3407		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3408
3409		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3410		    op0 = fold_rtx (op0, NULL_RTX);
3411
3412		  op0 = equiv_constant (op0);
3413		  if (op0)
3414		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3415						    op0, mode);
3416		}
3417	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3418			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3419		       && eltcode != DIV && eltcode != MOD
3420		       && eltcode != UDIV && eltcode != UMOD
3421		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3422		       && eltcode != ROTATE && eltcode != ROTATERT
3423		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3424			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3425				== mode))
3426			   || CONSTANT_P (XEXP (elt->exp, 0)))
3427		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3428			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3429				== mode))
3430			   || CONSTANT_P (XEXP (elt->exp, 1))))
3431		{
3432		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3433		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3434
3435		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3436		    op0 = fold_rtx (op0, NULL_RTX);
3437
3438		  if (op0)
3439		    op0 = equiv_constant (op0);
3440
3441		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3442		    op1 = fold_rtx (op1, NULL_RTX);
3443
3444		  if (op1)
3445		    op1 = equiv_constant (op1);
3446
3447		  /* If we are looking for the low SImode part of
3448		     (ashift:DI c (const_int 32)), it doesn't work
3449		     to compute that in SImode, because a 32-bit shift
3450		     in SImode is unpredictable.  We know the value is 0.  */
3451		  if (op0 && op1
3452		      && GET_CODE (elt->exp) == ASHIFT
3453		      && GET_CODE (op1) == CONST_INT
3454		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3455		    {
3456		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3457
3458			/* If the count fits in the inner mode's width,
3459			   but exceeds the outer mode's width,
3460			   the value will get truncated to 0
3461			   by the subreg.  */
3462			new = const0_rtx;
3463		      else
3464			/* If the count exceeds even the inner mode's width,
3465			   don't fold this expression.  */
3466			new = 0;
3467		    }
3468		  else if (op0 && op1)
3469		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3470						     op0, op1);
3471		}
3472
3473	      else if (GET_CODE (elt->exp) == SUBREG
3474		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
3475		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3476			   <= UNITS_PER_WORD)
3477		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3478		new = copy_rtx (SUBREG_REG (elt->exp));
3479
3480	      if (new)
3481		return new;
3482	    }
3483	}
3484
3485      return x;
3486
3487    case NOT:
3488    case NEG:
3489      /* If we have (NOT Y), see if Y is known to be (NOT Z).
3490	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3491      new = lookup_as_function (XEXP (x, 0), code);
3492      if (new)
3493	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3494      break;
3495
3496    case MEM:
3497      /* If we are not actually processing an insn, don't try to find the
3498	 best address.  Not only don't we care, but we could modify the
3499	 MEM in an invalid way since we have no insn to validate against.  */
3500      if (insn != 0)
3501	find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3502
3503      {
3504	/* Even if we don't fold in the insn itself,
3505	   we can safely do so here, in hopes of getting a constant.  */
3506	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3507	rtx base = 0;
3508	HOST_WIDE_INT offset = 0;
3509
3510	if (GET_CODE (addr) == REG
3511	    && REGNO_QTY_VALID_P (REGNO (addr)))
3512	  {
3513	    int addr_q = REG_QTY (REGNO (addr));
3514	    struct qty_table_elem *addr_ent = &qty_table[addr_q];
3515
3516	    if (GET_MODE (addr) == addr_ent->mode
3517		&& addr_ent->const_rtx != NULL_RTX)
3518	      addr = addr_ent->const_rtx;
3519	  }
3520
3521	/* If address is constant, split it into a base and integer offset.  */
3522	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3523	  base = addr;
3524	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3525		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3526	  {
3527	    base = XEXP (XEXP (addr, 0), 0);
3528	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3529	  }
3530	else if (GET_CODE (addr) == LO_SUM
3531		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3532	  base = XEXP (addr, 1);
3533	else if (GET_CODE (addr) == ADDRESSOF)
3534	  return change_address (x, VOIDmode, addr);
3535
3536	/* If this is a constant pool reference, we can fold it into its
3537	   constant to allow better value tracking.  */
3538	if (base && GET_CODE (base) == SYMBOL_REF
3539	    && CONSTANT_POOL_ADDRESS_P (base))
3540	  {
3541	    rtx constant = get_pool_constant (base);
3542	    enum machine_mode const_mode = get_pool_mode (base);
3543	    rtx new;
3544
3545	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3546	      {
3547		constant_pool_entries_cost = COST (constant);
3548		constant_pool_entries_regcost = approx_reg_cost (constant);
3549	      }
3550
3551	    /* If we are loading the full constant, we have an equivalence.  */
3552	    if (offset == 0 && mode == const_mode)
3553	      return constant;
3554
3555	    /* If this actually isn't a constant (weird!), we can't do
3556	       anything.  Otherwise, handle the two most common cases:
3557	       extracting a word from a multi-word constant, and extracting
3558	       the low-order bits.  Other cases don't seem common enough to
3559	       worry about.  */
3560	    if (! CONSTANT_P (constant))
3561	      return x;
3562
3563	    if (GET_MODE_CLASS (mode) == MODE_INT
3564		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
3565		&& offset % UNITS_PER_WORD == 0
3566		&& (new = operand_subword (constant,
3567					   offset / UNITS_PER_WORD,
3568					   0, const_mode)) != 0)
3569	      return new;
3570
3571	    if (((BYTES_BIG_ENDIAN
3572		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3573		 || (! BYTES_BIG_ENDIAN && offset == 0))
3574		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
3575	      return new;
3576	  }
3577
3578	/* If this is a reference to a label at a known position in a jump
3579	   table, we also know its value.  */
3580	if (base && GET_CODE (base) == LABEL_REF)
3581	  {
3582	    rtx label = XEXP (base, 0);
3583	    rtx table_insn = NEXT_INSN (label);
3584
3585	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3586		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3587	      {
3588		rtx table = PATTERN (table_insn);
3589
3590		if (offset >= 0
3591		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3592			< XVECLEN (table, 0)))
3593		  return XVECEXP (table, 0,
3594				  offset / GET_MODE_SIZE (GET_MODE (table)));
3595	      }
3596	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3597		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3598	      {
3599		rtx table = PATTERN (table_insn);
3600
3601		if (offset >= 0
3602		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3603			< XVECLEN (table, 1)))
3604		  {
3605		    offset /= GET_MODE_SIZE (GET_MODE (table));
3606		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3607					 XEXP (table, 0));
3608
3609		    if (GET_MODE (table) != Pmode)
3610		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3611
3612		    /* Indicate this is a constant.  This isn't a
3613		       valid form of CONST, but it will only be used
3614		       to fold the next insns and then discarded, so
3615		       it should be safe.
3616
3617		       Note this expression must be explicitly discarded
3618		       by cse_insn, else it may end up in a REG_EQUAL note
3619		       and "escape" to cause problems elsewhere.  */
3620		    return gen_rtx_CONST (GET_MODE (new), new);
3621		  }
3622	      }
3623	  }
3624
3625	return x;
3626      }
3627
3628#ifdef NO_FUNCTION_CSE
3629    case CALL:
3630      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3631	return x;
3632      break;
3633#endif
3634
3635    case ASM_OPERANDS:
3636      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3637	validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3638			 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3639      break;
3640
3641    default:
3642      break;
3643    }
3644
3645  const_arg0 = 0;
3646  const_arg1 = 0;
3647  const_arg2 = 0;
3648  mode_arg0 = VOIDmode;
3649
3650  /* Try folding our operands.
3651     Then see which ones have constant values known.  */
3652
3653  fmt = GET_RTX_FORMAT (code);
3654  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3655    if (fmt[i] == 'e')
3656      {
3657	rtx arg = XEXP (x, i);
3658	rtx folded_arg = arg, const_arg = 0;
3659	enum machine_mode mode_arg = GET_MODE (arg);
3660	rtx cheap_arg, expensive_arg;
3661	rtx replacements[2];
3662	int j;
3663	int old_cost = COST_IN (XEXP (x, i), code);
3664
3665	/* Most arguments are cheap, so handle them specially.  */
3666	switch (GET_CODE (arg))
3667	  {
3668	  case REG:
3669	    /* This is the same as calling equiv_constant; it is duplicated
3670	       here for speed.  */
3671	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3672	      {
3673		int arg_q = REG_QTY (REGNO (arg));
3674		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3675
3676		if (arg_ent->const_rtx != NULL_RTX
3677		    && GET_CODE (arg_ent->const_rtx) != REG
3678		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3679		  const_arg
3680		    = gen_lowpart_if_possible (GET_MODE (arg),
3681					       arg_ent->const_rtx);
3682	      }
3683	    break;
3684
3685	  case CONST:
3686	  case CONST_INT:
3687	  case SYMBOL_REF:
3688	  case LABEL_REF:
3689	  case CONST_DOUBLE:
3690	  case CONST_VECTOR:
3691	    const_arg = arg;
3692	    break;
3693
3694#ifdef HAVE_cc0
3695	  case CC0:
3696	    folded_arg = prev_insn_cc0;
3697	    mode_arg = prev_insn_cc0_mode;
3698	    const_arg = equiv_constant (folded_arg);
3699	    break;
3700#endif
3701
3702	  default:
3703	    folded_arg = fold_rtx (arg, insn);
3704	    const_arg = equiv_constant (folded_arg);
3705	  }
3706
3707	/* For the first three operands, see if the operand
3708	   is constant or equivalent to a constant.  */
3709	switch (i)
3710	  {
3711	  case 0:
3712	    folded_arg0 = folded_arg;
3713	    const_arg0 = const_arg;
3714	    mode_arg0 = mode_arg;
3715	    break;
3716	  case 1:
3717	    folded_arg1 = folded_arg;
3718	    const_arg1 = const_arg;
3719	    break;
3720	  case 2:
3721	    const_arg2 = const_arg;
3722	    break;
3723	  }
3724
3725	/* Pick the least expensive of the folded argument and an
3726	   equivalent constant argument.  */
3727	if (const_arg == 0 || const_arg == folded_arg
3728	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3729	  cheap_arg = folded_arg, expensive_arg = const_arg;
3730	else
3731	  cheap_arg = const_arg, expensive_arg = folded_arg;
3732
3733	/* Try to replace the operand with the cheapest of the two
3734	   possibilities.  If it doesn't work and this is either of the first
3735	   two operands of a commutative operation, try swapping them.
3736	   If THAT fails, try the more expensive, provided it is cheaper
3737	   than what is already there.  */
3738
3739	if (cheap_arg == XEXP (x, i))
3740	  continue;
3741
3742	if (insn == 0 && ! copied)
3743	  {
3744	    x = copy_rtx (x);
3745	    copied = 1;
3746	  }
3747
3748	/* Order the replacements from cheapest to most expensive.  */
3749	replacements[0] = cheap_arg;
3750	replacements[1] = expensive_arg;
3751
3752	for (j = 0; j < 2 && replacements[j]; j++)
3753	  {
3754	    int new_cost = COST_IN (replacements[j], code);
3755
3756	    /* Stop if what existed before was cheaper.  Prefer constants
3757	       in the case of a tie.  */
3758	    if (new_cost > old_cost
3759		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3760	      break;
3761
3762	    /* It's not safe to substitute the operand of a conversion
3763	       operator with a constant, as the conversion's identity
3764	       depends upon the mode of its operand.  This optimization
3765	       is handled by the call to simplify_unary_operation.  */
3766	    if (GET_RTX_CLASS (code) == '1'
3767		&& GET_MODE (replacements[j]) != mode_arg0
3768		&& (code == ZERO_EXTEND
3769		    || code == SIGN_EXTEND
3770		    || code == TRUNCATE
3771		    || code == FLOAT_TRUNCATE
3772		    || code == FLOAT_EXTEND
3773		    || code == FLOAT
3774		    || code == FIX
3775		    || code == UNSIGNED_FLOAT
3776		    || code == UNSIGNED_FIX))
3777	      continue;
3778
3779	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3780	      break;
3781
3782	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3783		|| code == LTGT || code == UNEQ || code == ORDERED
3784		|| code == UNORDERED)
3785	      {
3786		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3787		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3788
3789		if (apply_change_group ())
3790		  {
3791		    /* Swap them back, leaving X temporarily invalid so this loop
3792		       can continue; MUST_SWAP flags them to be swapped back later.  */
3793		    rtx tem;
3794
3795		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3796				       XEXP (x, 1) = tem;
3797		    must_swap = 1;
3798		    break;
3799		  }
3800	      }
3801	  }
3802      }
3803
3804    else
3805      {
3806	if (fmt[i] == 'E')
3807	  /* Don't try to fold inside of a vector of expressions.
3808	     Doing nothing is harmless.  */
3809	  {;}
3810      }
3811
3812  /* If a commutative operation, place a constant integer as the second
3813     operand unless the first operand is also a constant integer.  Otherwise,
3814     place any constant second unless the first operand is also a constant.  */
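  /* E.g., (plus:SI (const_int 4) (reg:SI 100)) becomes
     (plus:SI (reg:SI 100) (const_int 4)) -- an illustrative case.  */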
3815
3816  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3817      || code == LTGT || code == UNEQ || code == ORDERED
3818      || code == UNORDERED)
3819    {
3820      if (must_swap
3821	  || swap_commutative_operands_p (const_arg0 ? const_arg0
3822						     : XEXP (x, 0),
3823					  const_arg1 ? const_arg1
3824						     : XEXP (x, 1)))
3825	{
3826	  rtx tem = XEXP (x, 0);
3827
3828	  if (insn == 0 && ! copied)
3829	    {
3830	      x = copy_rtx (x);
3831	      copied = 1;
3832	    }
3833
3834	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3835	  validate_change (insn, &XEXP (x, 1), tem, 1);
3836	  if (apply_change_group ())
3837	    {
3838	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3839	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3840	    }
3841	}
3842    }
3843
3844  /* If X is an arithmetic operation, see if we can simplify it.  */
3845
3846  switch (GET_RTX_CLASS (code))
3847    {
3848    case '1':
3849      {
3850	int is_const = 0;
3851
3852	/* We can't simplify extension ops unless we know the
3853	   original mode.  */
3854	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3855	    && mode_arg0 == VOIDmode)
3856	  break;
3857
3858	/* If we had a CONST, strip it off and put it back later if we
3859	   fold.  */
3860	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3861	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3862
3863	new = simplify_unary_operation (code, mode,
3864					const_arg0 ? const_arg0 : folded_arg0,
3865					mode_arg0);
3866	if (new != 0 && is_const)
3867	  new = gen_rtx_CONST (mode, new);
3868      }
3869      break;
3870
3871    case '<':
3872      /* Don't perform any simplifications of vector mode comparisons.  */
3873      if (VECTOR_MODE_P (mode))
3874	break;
3875
3876      /* See what items are actually being compared and set FOLDED_ARG[01]
3877	 to those values and CODE to the actual comparison code.  If any are
3878	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3879	 do anything if both operands are already known to be constant.  */
3880
3881      if (const_arg0 == 0 || const_arg1 == 0)
3882	{
3883	  struct table_elt *p0, *p1;
3884	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3885	  enum machine_mode mode_arg1;
3886
3887#ifdef FLOAT_STORE_FLAG_VALUE
3888	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3889	    {
3890	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3891			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3892	      false_rtx = CONST0_RTX (mode);
3893	    }
3894#endif
3895
3896	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3897				       &mode_arg0, &mode_arg1);
3898	  const_arg0 = equiv_constant (folded_arg0);
3899	  const_arg1 = equiv_constant (folded_arg1);
3900
3901	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3902	     what kinds of things are being compared, so we can't do
3903	     anything with this comparison.  */
3904
3905	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3906	    break;
3907
3908	  /* If we do not now have two constants being compared, see
3909	     if we can nevertheless deduce some things about the
3910	     comparison.  */
3911	  if (const_arg0 == 0 || const_arg1 == 0)
3912	    {
3913	      /* Some addresses are known to be nonzero.  We don't know
3914		 their sign, but equality comparisons are known.  */
3915	      if (const_arg1 == const0_rtx
3916		  && nonzero_address_p (folded_arg0))
3917		{
3918		  if (code == EQ)
3919		    return false_rtx;
3920		  else if (code == NE)
3921		    return true_rtx;
3922		}
3923
3924	      /* See if the two operands are the same.  */
3925
3926	      if (folded_arg0 == folded_arg1
3927		  || (GET_CODE (folded_arg0) == REG
3928		      && GET_CODE (folded_arg1) == REG
3929		      && (REG_QTY (REGNO (folded_arg0))
3930			  == REG_QTY (REGNO (folded_arg1))))
3931		  || ((p0 = lookup (folded_arg0,
3932				    (safe_hash (folded_arg0, mode_arg0)
3933				     & HASH_MASK), mode_arg0))
3934		      && (p1 = lookup (folded_arg1,
3935				       (safe_hash (folded_arg1, mode_arg0)
3936					& HASH_MASK), mode_arg0))
3937		      && p0->first_same_value == p1->first_same_value))
3938		{
3939		  /* Sadly two equal NaNs are not equivalent.  */
3940		  if (!HONOR_NANS (mode_arg0))
3941		    return ((code == EQ || code == LE || code == GE
3942			     || code == LEU || code == GEU || code == UNEQ
3943			     || code == UNLE || code == UNGE
3944			     || code == ORDERED)
3945			    ? true_rtx : false_rtx);
3946		  /* Take care of the FP compares we can resolve.  */
3947		  if (code == UNEQ || code == UNLE || code == UNGE)
3948		    return true_rtx;
3949		  if (code == LTGT || code == LT || code == GT)
3950		    return false_rtx;
3951		}
3952
3953	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3954		 doing now is either the same as we did before or the reverse
3955		 (we only check the reverse if not floating-point).  */
3956	      else if (GET_CODE (folded_arg0) == REG)
3957		{
3958		  int qty = REG_QTY (REGNO (folded_arg0));
3959
3960		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3961		    {
3962		      struct qty_table_elem *ent = &qty_table[qty];
3963
3964		      if ((comparison_dominates_p (ent->comparison_code, code)
3965			   || (! FLOAT_MODE_P (mode_arg0)
3966			       && comparison_dominates_p (ent->comparison_code,
3967						          reverse_condition (code))))
3968			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3969			      || (const_arg1
3970				  && rtx_equal_p (ent->comparison_const,
3971						  const_arg1))
3972			      || (GET_CODE (folded_arg1) == REG
3973				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3974			return (comparison_dominates_p (ent->comparison_code, code)
3975				? true_rtx : false_rtx);
3976		    }
3977		}
3978	    }
3979	}
3980
3981      /* If we are comparing against zero, see if the first operand is
3982	 equivalent to an IOR with a constant.  If so, we may be able to
3983	 determine the result of this comparison.  */
3984
3985      if (const_arg1 == const0_rtx)
3986	{
3987	  rtx y = lookup_as_function (folded_arg0, IOR);
3988	  rtx inner_const;
3989
3990	  if (y != 0
3991	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3992	      && GET_CODE (inner_const) == CONST_INT
3993	      && INTVAL (inner_const) != 0)
3994	    {
3995	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3996	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3997			      && (INTVAL (inner_const)
3998				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3999	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4000
4001#ifdef FLOAT_STORE_FLAG_VALUE
4002	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4003		{
4004		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4005			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4006		  false_rtx = CONST0_RTX (mode);
4007		}
4008#endif
4009
4010	      switch (code)
4011		{
4012		case EQ:
4013		  return false_rtx;
4014		case NE:
4015		  return true_rtx;
4016		case LT:  case LE:
4017		  if (has_sign)
4018		    return true_rtx;
4019		  break;
4020		case GT:  case GE:
4021		  if (has_sign)
4022		    return false_rtx;
4023		  break;
4024		default:
4025		  break;
4026		}
4027	    }
4028	}
4029
4030      new = simplify_relational_operation (code,
4031					   (mode_arg0 != VOIDmode
4032					    ? mode_arg0
4033					    : (GET_MODE (const_arg0
4034							 ? const_arg0
4035							 : folded_arg0)
4036					       != VOIDmode)
4037					    ? GET_MODE (const_arg0
4038							? const_arg0
4039							: folded_arg0)
4040					    : GET_MODE (const_arg1
4041							? const_arg1
4042							: folded_arg1)),
4043					   const_arg0 ? const_arg0 : folded_arg0,
4044					   const_arg1 ? const_arg1 : folded_arg1);
4045#ifdef FLOAT_STORE_FLAG_VALUE
4046      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4047	{
4048	  if (new == const0_rtx)
4049	    new = CONST0_RTX (mode);
4050	  else
4051	    new = (CONST_DOUBLE_FROM_REAL_VALUE
4052		   (FLOAT_STORE_FLAG_VALUE (mode), mode));
4053	}
4054#endif
4055      break;
4056
4057    case '2':
4058    case 'c':
4059      switch (code)
4060	{
4061	case PLUS:
4062	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4063	     with that LABEL_REF as its second operand.  If so, the result is
4064	     the first operand of that MINUS.  This handles switches with an
4065	     ADDR_DIFF_VEC table.  */
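	  /* Illustrative case: (plus:SI (minus:SI (reg:SI 100) (label_ref L))
	     (label_ref L)) folds to (reg:SI 100).  */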
4066	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4067	    {
4068	      rtx y
4069		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4070		: lookup_as_function (folded_arg0, MINUS);
4071
4072	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4073		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4074		return XEXP (y, 0);
4075
4076	      /* Now try for a CONST of a MINUS like the above.  */
4077	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4078			: lookup_as_function (folded_arg0, CONST))) != 0
4079		  && GET_CODE (XEXP (y, 0)) == MINUS
4080		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4081		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4082		return XEXP (XEXP (y, 0), 0);
4083	    }
4084
4085	  /* Likewise if the operands are in the other order.  */
4086	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4087	    {
4088	      rtx y
4089		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4090		: lookup_as_function (folded_arg1, MINUS);
4091
4092	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4093		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4094		return XEXP (y, 0);
4095
4096	      /* Now try for a CONST of a MINUS like the above.  */
4097	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4098			: lookup_as_function (folded_arg1, CONST))) != 0
4099		  && GET_CODE (XEXP (y, 0)) == MINUS
4100		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4101		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4102		return XEXP (XEXP (y, 0), 0);
4103	    }
4104
4105	  /* If second operand is a register equivalent to a negative
4106	     CONST_INT, see if we can find a register equivalent to the
4107	     positive constant.  Make a MINUS if so.  Don't do this for
4108	     a non-negative constant since we might then alternate between
4109	     choosing positive and negative constants.  Having the positive
4110	     constant previously-used is the more common case.  Be sure
4111	     the resulting constant is non-negative; if const_arg1 were
4112	     the smallest negative number this would overflow: depending
4113	     on the mode, this would either just be the same value (and
4114	     hence not save anything) or be incorrect.  */
4115	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4116	      && INTVAL (const_arg1) < 0
4117	      /* This used to test
4118
4119	         -INTVAL (const_arg1) >= 0
4120
4121		 But the Sun V5.0 compilers mis-compiled that test.  So
4122		 instead we test for the problematic value in a more direct
4123		 manner and hope the Sun compilers get it correct.  */
4124	      && INTVAL (const_arg1) !=
4125	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4126	      && GET_CODE (folded_arg1) == REG)
4127	    {
4128	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4129	      struct table_elt *p
4130		= lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4131			  mode);
4132
4133	      if (p)
4134		for (p = p->first_same_value; p; p = p->next_same_value)
4135		  if (GET_CODE (p->exp) == REG)
4136		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4137						canon_reg (p->exp, NULL_RTX));
4138	    }
4139	  goto from_plus;
4140
4141	case MINUS:
4142	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4143	     If so, produce (PLUS Z C2-C).  */
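	  /* E.g., with Y known to be (plus:SI (reg:SI 100) (const_int 6))
	     and C == 2, we rewrite (minus:SI Y (const_int 2)) as
	     (plus:SI (reg:SI 100) (const_int 4)) -- illustrative values.  */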
4144	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4145	    {
4146	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4147	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4148		return fold_rtx (plus_constant (copy_rtx (y),
4149						-INTVAL (const_arg1)),
4150				 NULL_RTX);
4151	    }
4152
4153	  /* Fall through.  */
4154
4155	from_plus:
4156	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4157	case IOR:     case AND:       case XOR:
4158	case MULT:
4159	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4160	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4161	     is known to be of similar form, we may be able to replace the
4162	     operation with a combined operation.  This may eliminate the
4163	     intermediate operation if every use is simplified in this way.
4164	     Note that the similar optimization done by combine.c only works
4165	     if the intermediate operation's result has only one reference.  */
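	  /* For instance (hypothetical registers), if (reg:SI 100) is known
	     to be (plus:SI (reg:SI 99) (const_int 4)), then
	     (plus:SI (reg:SI 100) (const_int 8)) can be rewritten as
	     (plus:SI (reg:SI 99) (const_int 12)).  */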
4166
4167	  if (GET_CODE (folded_arg0) == REG
4168	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4169	    {
4170	      int is_shift
4171		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4172	      rtx y = lookup_as_function (folded_arg0, code);
4173	      rtx inner_const;
4174	      enum rtx_code associate_code;
4175	      rtx new_const;
4176
4177	      if (y == 0
4178		  || 0 == (inner_const
4179			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4180		  || GET_CODE (inner_const) != CONST_INT
4181		  /* If we have compiled a statement like
4182		     "if (x == (x & mask1))", and now are looking at
4183		     "x & mask2", we will have a case where the first operand
4184		     of Y is the same as our first operand.  Unless we detect
4185		     this case, an infinite loop will result.  */
4186		  || XEXP (y, 0) == folded_arg0)
4187		break;
4188
4189	      /* Don't associate these operations if this is a PLUS of the
4190		 same constant and it is a power of two.  These might be doable
4191		 with a pre- or post-increment.  Similarly for two subtracts of
4192		 identical powers of two with post-decrement.  */
4193
4194	      if (code == PLUS && const_arg1 == inner_const
4195		  && ((HAVE_PRE_INCREMENT
4196			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4197		      || (HAVE_POST_INCREMENT
4198			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4199		      || (HAVE_PRE_DECREMENT
4200			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4201		      || (HAVE_POST_DECREMENT
4202			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4203		break;
4204
4205	      /* Compute the code used to compose the constants.  For example,
4206		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
4207
4208	      associate_code = (is_shift || code == MINUS ? PLUS : code);
4209
4210	      new_const = simplify_binary_operation (associate_code, mode,
4211						     const_arg1, inner_const);
4212
4213	      if (new_const == 0)
4214		break;
4215
4216	      /* If we are associating shift operations, don't let this
4217		 produce a shift of the size of the object or larger.
4218		 This could occur when we follow a sign-extend by a right
4219		 shift on a machine that does a sign-extend as a pair
4220		 of shifts.  */
4221
4222	      if (is_shift && GET_CODE (new_const) == CONST_INT
4223		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4224		{
4225		  /* As an exception, we can turn an ASHIFTRT of this
4226		     form into a shift of the number of bits - 1.  */
4227		  if (code == ASHIFTRT)
4228		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4229		  else
4230		    break;
4231		}
4232
4233	      y = copy_rtx (XEXP (y, 0));
4234
4235	      /* If Y contains our first operand (the most common way this
4236		 can happen is if Y is a MEM), we would go into an infinite
4237		 loop if we tried to fold it.  So don't in that case.  */
4238
4239	      if (! reg_mentioned_p (folded_arg0, y))
4240		y = fold_rtx (y, insn);
4241
4242	      return simplify_gen_binary (code, mode, y, new_const);
4243	    }
4244	  break;
4245
4246	case DIV:       case UDIV:
4247	  /* ??? The associative optimization performed immediately above is
4248	     also possible for DIV and UDIV using associate_code of MULT.
4249	     However, we would need extra code to verify that the
4250	     multiplication does not overflow, that is, there is no overflow
4251	     in the calculation of new_const.  */
4252	  break;
4253
4254	default:
4255	  break;
4256	}
4257
4258      new = simplify_binary_operation (code, mode,
4259				       const_arg0 ? const_arg0 : folded_arg0,
4260				       const_arg1 ? const_arg1 : folded_arg1);
4261      break;
4262
4263    case 'o':
4264      /* (lo_sum (high X) X) is simply X.  */
4265      if (code == LO_SUM && const_arg0 != 0
4266	  && GET_CODE (const_arg0) == HIGH
4267	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4268	return const_arg1;
4269      break;
4270
4271    case '3':
4272    case 'b':
4273      new = simplify_ternary_operation (code, mode, mode_arg0,
4274					const_arg0 ? const_arg0 : folded_arg0,
4275					const_arg1 ? const_arg1 : folded_arg1,
4276					const_arg2 ? const_arg2 : XEXP (x, 2));
4277      break;
4278
4279    case 'x':
4280      /* Eliminate CONSTANT_P_RTX if it's constant.  */
4281      if (code == CONSTANT_P_RTX)
4282	{
4283	  if (const_arg0)
4284	    return const1_rtx;
4285	  if (optimize == 0 || !flag_gcse)
4286	    return const0_rtx;
4287	}
4288      break;
4289    }
4290
4291  return new ? new : x;
4292}
4293
4294/* Return a constant value currently equivalent to X.
4295   Return 0 if we don't know one.  */
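/* For example (hypothetical), if (reg:SI 100) currently has the quantity
   equivalent (const_int 7), equiv_constant returns (const_int 7); for a
   register with no known constant it returns 0.  */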
4296
4297static rtx
4298equiv_constant (rtx x)
4299{
4300  if (GET_CODE (x) == REG
4301      && REGNO_QTY_VALID_P (REGNO (x)))
4302    {
4303      int x_q = REG_QTY (REGNO (x));
4304      struct qty_table_elem *x_ent = &qty_table[x_q];
4305
4306      if (x_ent->const_rtx)
4307	x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4308    }
4309
4310  if (x == 0 || CONSTANT_P (x))
4311    return x;
4312
4313  /* If X is a MEM, try to fold it outside the context of any insn to see if
4314     it might be equivalent to a constant.  That handles the case where it
4315     is a constant-pool reference.  Then try to look it up in the hash table
4316     in case it is something whose value we have seen before.  */
4317
4318  if (GET_CODE (x) == MEM)
4319    {
4320      struct table_elt *elt;
4321
4322      x = fold_rtx (x, NULL_RTX);
4323      if (CONSTANT_P (x))
4324	return x;
4325
4326      elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4327      if (elt == 0)
4328	return 0;
4329
4330      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4331	if (elt->is_const && CONSTANT_P (elt->exp))
4332	  return elt->exp;
4333    }
4334
4335  return 0;
4336}
4337
4338/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4339   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4340   least-significant part of X.
4341   MODE specifies how big a part of X to return.
4342
4343   If the requested operation cannot be done, 0 is returned.
4344
4345   This is similar to gen_lowpart in emit-rtl.c.  */
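/* E.g., gen_lowpart_if_possible (QImode, GEN_INT (0x1234)) returns
   (const_int 0x34) via gen_lowpart_common -- an illustrative call.  */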
4346
4347rtx
4348gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4349{
4350  rtx result = gen_lowpart_common (mode, x);
4351
4352  if (result)
4353    return result;
4354  else if (GET_CODE (x) == MEM)
4355    {
4356      /* This is the only other case we handle.  */
4357      int offset = 0;
4358      rtx new;
4359
4360      if (WORDS_BIG_ENDIAN)
4361	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4362		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4363      if (BYTES_BIG_ENDIAN)
4364	/* Adjust the address so that the address-after-the-data is
4365	   unchanged.  */
4366	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4367		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
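      /* E.g., on a 32-bit big-endian target (illustrative), taking the
	 QImode low part of an SImode MEM yields offset 3, so that the
	 address just past the data is unchanged.  */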
4368
4369      new = adjust_address_nv (x, mode, offset);
4370      if (! memory_address_p (mode, XEXP (new, 0)))
4371	return 0;
4372
4373      return new;
4374    }
4375  else
4376    return 0;
4377}
4378
4379/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4380   branch.  It will be zero if not.
4381
4382   In certain cases, this can cause us to add an equivalence.  For example,
4383   if we are following the taken case of
4384	if (i == 2)
4385   we can add the fact that `i' and `2' are now equivalent.
4386
4387   In any case, we can record that this comparison was passed.  If the same
4388   comparison is seen later, we will know its value.  */
4389
4390static void
4391record_jump_equiv (rtx insn, int taken)
4392{
4393  int cond_known_true;
4394  rtx op0, op1;
4395  rtx set;
4396  enum machine_mode mode, mode0, mode1;
4397  int reversed_nonequality = 0;
4398  enum rtx_code code;
4399
4400  /* Ensure this is the right kind of insn.  */
4401  if (! any_condjump_p (insn))
4402    return;
4403  set = pc_set (insn);
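  /* SET_SRC (set) has the form (if_then_else COND TARGET1 TARGET2), where
     one of the targets is (pc); which arm is (pc) tells us whether the
     condition must hold on the path we are following.  */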
4404
4405  /* See if this jump condition is known true or false.  */
4406  if (taken)
4407    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4408  else
4409    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4410
4411  /* Get the type of comparison being done and the operands being compared.
4412     If we had to reverse a non-equality condition, record that fact so we
4413     know that it isn't valid for floating-point.  */
4414  code = GET_CODE (XEXP (SET_SRC (set), 0));
4415  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4416  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4417
4418  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4419  if (! cond_known_true)
4420    {
4421      code = reversed_comparison_code_parts (code, op0, op1, insn);
4422
4423      /* Don't remember if we can't find the inverse.  */
4424      if (code == UNKNOWN)
4425	return;
4426    }
4427
4428  /* The mode is the mode of the non-constant.  */
4429  mode = mode0;
4430  if (mode1 != VOIDmode)
4431    mode = mode1;
4432
4433  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4434}
4435
4436/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4437   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4438   Make any useful entries we can with that information.  Called from
4439   above function and called recursively.  */
4440
4441static void
4442record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4443		  rtx op1, int reversed_nonequality)
4444{
4445  unsigned op0_hash, op1_hash;
4446  int op0_in_memory, op1_in_memory;
4447  struct table_elt *op0_elt, *op1_elt;
4448
4449  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4450     we know that they are also equal in the smaller mode (this is also
4451     true for all smaller modes whether or not there is a SUBREG, but
4452     is not worth testing for with no SUBREG).  */
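  /* E.g. (illustrative, 32-bit target): if (subreg:DI (reg:SI 100) 0)
     is known equal to (reg:DI 101), then (reg:SI 100) is equal to the
     SImode low part of (reg:DI 101) as well.  */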
4453
4454  /* Note that GET_MODE (op0) may not equal MODE.  */
4455  if (code == EQ && GET_CODE (op0) == SUBREG
4456      && (GET_MODE_SIZE (GET_MODE (op0))
4457	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4458    {
4459      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4460      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4461
4462      record_jump_cond (code, mode, SUBREG_REG (op0),
4463			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4464			reversed_nonequality);
4465    }
4466
4467  if (code == EQ && GET_CODE (op1) == SUBREG
4468      && (GET_MODE_SIZE (GET_MODE (op1))
4469	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4470    {
4471      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4472      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4473
4474      record_jump_cond (code, mode, SUBREG_REG (op1),
4475			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4476			reversed_nonequality);
4477    }
4478
4479  /* Similarly, if this is an NE comparison, and either is a SUBREG
4480     making a smaller mode, we know the whole thing is also NE.  */
4481
4482  /* Note that GET_MODE (op0) may not equal MODE;
4483     if we test MODE instead, we can get an infinite recursion
4484     alternating between two modes each wider than MODE.  */
4485
4486  if (code == NE && GET_CODE (op0) == SUBREG
4487      && subreg_lowpart_p (op0)
4488      && (GET_MODE_SIZE (GET_MODE (op0))
4489	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4490    {
4491      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4492      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4493
4494      record_jump_cond (code, mode, SUBREG_REG (op0),
4495			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4496			reversed_nonequality);
4497    }
4498
4499  if (code == NE && GET_CODE (op1) == SUBREG
4500      && subreg_lowpart_p (op1)
4501      && (GET_MODE_SIZE (GET_MODE (op1))
4502	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4503    {
4504      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4505      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4506
4507      record_jump_cond (code, mode, SUBREG_REG (op1),
4508			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4509			reversed_nonequality);
4510    }
4511
4512  /* Hash both operands.  */
4513
4514  do_not_record = 0;
4515  hash_arg_in_memory = 0;
4516  op0_hash = HASH (op0, mode);
4517  op0_in_memory = hash_arg_in_memory;
4518
4519  if (do_not_record)
4520    return;
4521
4522  do_not_record = 0;
4523  hash_arg_in_memory = 0;
4524  op1_hash = HASH (op1, mode);
4525  op1_in_memory = hash_arg_in_memory;
4526
4527  if (do_not_record)
4528    return;
4529
4530  /* Look up both operands.  */
4531  op0_elt = lookup (op0, op0_hash, mode);
4532  op1_elt = lookup (op1, op1_hash, mode);
4533
4534  /* If both operands are already equivalent or if they are not in the
4535     table but are identical, do nothing.  */
4536  if ((op0_elt != 0 && op1_elt != 0
4537       && op0_elt->first_same_value == op1_elt->first_same_value)
4538      || op0 == op1 || rtx_equal_p (op0, op1))
4539    return;
4540
4541  /* If we aren't setting two things equal all we can do is save this
4542     comparison.  Similarly if this is floating-point.  In the latter
4543     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4544     If we record the equality, we might inadvertently delete code
4545     whose intent was to change -0 to +0.  */
4546
4547  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4548    {
4549      struct qty_table_elem *ent;
4550      int qty;
4551
4552      /* If we reversed a floating-point comparison, if OP0 is not a
4553	 register, or if OP1 is neither a register nor a constant, we can't
4554	 do anything.  */
4555
4556      if (GET_CODE (op1) != REG)
4557	op1 = equiv_constant (op1);
4558
4559      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4560	  || GET_CODE (op0) != REG || op1 == 0)
4561	return;
4562
4563      /* Put OP0 in the hash table if it isn't already.  This gives it a
4564	 new quantity number.  */
4565      if (op0_elt == 0)
4566	{
4567	  if (insert_regs (op0, NULL, 0))
4568	    {
4569	      rehash_using_reg (op0);
4570	      op0_hash = HASH (op0, mode);
4571
4572	      /* If OP0 is contained in OP1, this changes its hash code
4573		 as well.  Faster to rehash than to check, except
4574		 for the simple case of a constant.  */
4575	      if (! CONSTANT_P (op1))
4576		op1_hash = HASH (op1, mode);
4577	    }
4578
4579	  op0_elt = insert (op0, NULL, op0_hash, mode);
4580	  op0_elt->in_memory = op0_in_memory;
4581	}
4582
4583      qty = REG_QTY (REGNO (op0));
4584      ent = &qty_table[qty];
4585
4586      ent->comparison_code = code;
4587      if (GET_CODE (op1) == REG)
4588	{
4589	  /* Look it up again--in case op0 and op1 are the same.  */
4590	  op1_elt = lookup (op1, op1_hash, mode);
4591
4592	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4593	  if (op1_elt == 0)
4594	    {
4595	      if (insert_regs (op1, NULL, 0))
4596		{
4597		  rehash_using_reg (op1);
4598		  op1_hash = HASH (op1, mode);
4599		}
4600
4601	      op1_elt = insert (op1, NULL, op1_hash, mode);
4602	      op1_elt->in_memory = op1_in_memory;
4603	    }
4604
4605	  ent->comparison_const = NULL_RTX;
4606	  ent->comparison_qty = REG_QTY (REGNO (op1));
4607	}
4608      else
4609	{
4610	  ent->comparison_const = op1;
4611	  ent->comparison_qty = -1;
4612	}
4613
4614      return;
4615    }
4616
4617  /* If either side is still missing an equivalence, make it now,
4618     then merge the equivalences.  */
4619
4620  if (op0_elt == 0)
4621    {
4622      if (insert_regs (op0, NULL, 0))
4623	{
4624	  rehash_using_reg (op0);
4625	  op0_hash = HASH (op0, mode);
4626	}
4627
4628      op0_elt = insert (op0, NULL, op0_hash, mode);
4629      op0_elt->in_memory = op0_in_memory;
4630    }
4631
4632  if (op1_elt == 0)
4633    {
4634      if (insert_regs (op1, NULL, 0))
4635	{
4636	  rehash_using_reg (op1);
4637	  op1_hash = HASH (op1, mode);
4638	}
4639
4640      op1_elt = insert (op1, NULL, op1_hash, mode);
4641      op1_elt->in_memory = op1_in_memory;
4642    }
4643
4644  merge_equiv_classes (op0_elt, op1_elt);
4645  last_jump_equiv_class = op0_elt;
4646}
4647
4648/* CSE processing for one instruction.
4649   First simplify sources and addresses of all assignments
4650   in the instruction, using previously-computed equivalent values.
4651   Then install the new sources and destinations in the table
4652   of available values.
4653
4654   If LIBCALL_INSN is nonzero, don't record any equivalence made in
4655   the insn.  It means that INSN is inside a libcall block.  In this
4656   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4657
4658/* Data on one SET contained in the instruction.  */
4659
4660struct set
4661{
4662  /* The SET rtx itself.  */
4663  rtx rtl;
4664  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4665  rtx src;
4666  /* The hash-table element for the SET_SRC of the SET.  */
4667  struct table_elt *src_elt;
4668  /* Hash value for the SET_SRC.  */
4669  unsigned src_hash;
4670  /* Hash value for the SET_DEST.  */
4671  unsigned dest_hash;
4672  /* The SET_DEST, with SUBREG, etc., stripped.  */
4673  rtx inner_dest;
4674  /* Nonzero if the SET_SRC is in memory.  */
4675  char src_in_memory;
4676  /* Nonzero if the SET_SRC contains something
4677     whose value cannot be predicted and understood.  */
4678  char src_volatile;
4679  /* Original machine mode, in case it becomes a CONST_INT.
4680     The size of this field should match the size of the mode
4681     field of struct rtx_def (see rtl.h).  */
4682  ENUM_BITFIELD(machine_mode) mode : 8;
4683  /* A constant equivalent for SET_SRC, if any.  */
4684  rtx src_const;
4685  /* Original SET_SRC value used for libcall notes.  */
4686  rtx orig_src;
4687  /* Hash value of constant equivalent for SET_SRC.  */
4688  unsigned src_const_hash;
4689  /* Table entry for constant equivalent for SET_SRC, if any.  */
4690  struct table_elt *src_const_elt;
4691};
4692
4693static void
4694cse_insn (rtx insn, rtx libcall_insn)
4695{
4696  rtx x = PATTERN (insn);
4697  int i;
4698  rtx tem;
4699  int n_sets = 0;
4700
4701#ifdef HAVE_cc0
4702  /* Records what this insn does to set CC0.  */
4703  rtx this_insn_cc0 = 0;
4704  enum machine_mode this_insn_cc0_mode = VOIDmode;
4705#endif
4706
4707  rtx src_eqv = 0;
4708  struct table_elt *src_eqv_elt = 0;
4709  int src_eqv_volatile = 0;
4710  int src_eqv_in_memory = 0;
4711  unsigned src_eqv_hash = 0;
4712
4713  struct set *sets = (struct set *) 0;
4714
4715  this_insn = insn;
4716
4717  /* Find all the SETs and CLOBBERs in this instruction.
4718     Record all the SETs in the array `sets' and count them.
4719     Also determine whether there is a CLOBBER that invalidates
4720     all memory references, or all references at varying addresses.  */
4721
4722  if (GET_CODE (insn) == CALL_INSN)
4723    {
4724      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4725	{
4726	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4727	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4728	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4729	}
4730    }
4731
4732  if (GET_CODE (x) == SET)
4733    {
4734      sets = alloca (sizeof (struct set));
4735      sets[0].rtl = x;
4736
4737      /* Ignore SETs that are unconditional jumps.
4738	 They never need cse processing, so this does not hurt.
4739	 The reason is not efficiency but rather
4740	 so that we can test at the end for instructions
4741	 that have been simplified to unconditional jumps
4742	 and not be misled by unchanged instructions
4743	 that were unconditional jumps to begin with.  */
4744      if (SET_DEST (x) == pc_rtx
4745	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4746	;
4747
4748      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4749	 The hard function value register is used only once, to copy to
4750	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4751	 Ensure we invalidate the destination register.  On the 80386 no
4752	 other code would invalidate it since it is a fixed_reg.
4753	 We need not check the return of apply_change_group; see canon_reg.  */
4754
4755      else if (GET_CODE (SET_SRC (x)) == CALL)
4756	{
4757	  canon_reg (SET_SRC (x), insn);
4758	  apply_change_group ();
4759	  fold_rtx (SET_SRC (x), insn);
4760	  invalidate (SET_DEST (x), VOIDmode);
4761	}
4762      else
4763	n_sets = 1;
4764    }
4765  else if (GET_CODE (x) == PARALLEL)
4766    {
4767      int lim = XVECLEN (x, 0);
4768
4769      sets = alloca (lim * sizeof (struct set));
4770
4771      /* Find all regs explicitly clobbered in this insn,
4772	 and ensure they are not replaced with any other regs
4773	 elsewhere in this insn.
4774	 When a reg that is clobbered is also used for input,
4775	 we should presume that that is for a reason,
4776	 and we should not substitute some other register
4777	 which is not supposed to be clobbered.
4778	 Therefore, this loop cannot be merged into the one below
4779	 because a CALL may precede a CLOBBER and refer to the
4780	 value clobbered.  We must not let a canonicalization do
4781	 anything in that case.  */
4782      for (i = 0; i < lim; i++)
4783	{
4784	  rtx y = XVECEXP (x, 0, i);
4785	  if (GET_CODE (y) == CLOBBER)
4786	    {
4787	      rtx clobbered = XEXP (y, 0);
4788
4789	      if (GET_CODE (clobbered) == REG
4790		  || GET_CODE (clobbered) == SUBREG)
4791		invalidate (clobbered, VOIDmode);
4792	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4793		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4794		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4795	    }
4796	}
4797
4798      for (i = 0; i < lim; i++)
4799	{
4800	  rtx y = XVECEXP (x, 0, i);
4801	  if (GET_CODE (y) == SET)
4802	    {
4803	      /* As above, we ignore unconditional jumps and call-insns and
4804		 ignore the result of apply_change_group.  */
4805	      if (GET_CODE (SET_SRC (y)) == CALL)
4806		{
4807		  canon_reg (SET_SRC (y), insn);
4808		  apply_change_group ();
4809		  fold_rtx (SET_SRC (y), insn);
4810		  invalidate (SET_DEST (y), VOIDmode);
4811		}
4812	      else if (SET_DEST (y) == pc_rtx
4813		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4814		;
4815	      else
4816		sets[n_sets++].rtl = y;
4817	    }
4818	  else if (GET_CODE (y) == CLOBBER)
4819	    {
4820	      /* If we clobber memory, canon the address.
4821		 This does nothing when a register is clobbered
4822		 because we have already invalidated the reg.  */
4823	      if (GET_CODE (XEXP (y, 0)) == MEM)
4824		canon_reg (XEXP (y, 0), NULL_RTX);
4825	    }
4826	  else if (GET_CODE (y) == USE
4827		   && ! (GET_CODE (XEXP (y, 0)) == REG
4828			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4829	    canon_reg (y, NULL_RTX);
4830	  else if (GET_CODE (y) == CALL)
4831	    {
4832	      /* The result of apply_change_group can be ignored; see
4833		 canon_reg.  */
4834	      canon_reg (y, insn);
4835	      apply_change_group ();
4836	      fold_rtx (y, insn);
4837	    }
4838	}
4839    }
4840  else if (GET_CODE (x) == CLOBBER)
4841    {
4842      if (GET_CODE (XEXP (x, 0)) == MEM)
4843	canon_reg (XEXP (x, 0), NULL_RTX);
4844    }
4845
4846  /* Canonicalize a USE of a pseudo register or memory location.  */
4847  else if (GET_CODE (x) == USE
4848	   && ! (GET_CODE (XEXP (x, 0)) == REG
4849		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4850    canon_reg (XEXP (x, 0), NULL_RTX);
4851  else if (GET_CODE (x) == CALL)
4852    {
4853      /* The result of apply_change_group can be ignored; see canon_reg.  */
4854      canon_reg (x, insn);
4855      apply_change_group ();
4856      fold_rtx (x, insn);
4857    }
4858
4859  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4860     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
4861     is handled specially for this case, and if it isn't set, then there will
4862     be no equivalence for the destination.  */
4863  if (n_sets == 1 && REG_NOTES (insn) != 0
4864      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4865      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4866	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4867    {
4868      src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4869      XEXP (tem, 0) = src_eqv;
4870    }
4871
4872  /* Canonicalize sources and addresses of destinations.
4873     We do this in a separate pass to avoid problems when a MATCH_DUP is
4874     present in the insn pattern.  In that case, we want to ensure that
4875     we don't break the duplicate nature of the pattern.  So we will replace
4876     both operands at the same time.  Otherwise, we would fail to find an
4877     equivalent substitution in the loop calling validate_change below.
4878
4879     We used to suppress canonicalization of DEST if it appears in SRC,
4880     but we don't do this any more.  */
4881
4882  for (i = 0; i < n_sets; i++)
4883    {
4884      rtx dest = SET_DEST (sets[i].rtl);
4885      rtx src = SET_SRC (sets[i].rtl);
4886      rtx new = canon_reg (src, insn);
4887      int insn_code;
4888
4889      sets[i].orig_src = src;
4890      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4891	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4892	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4893	  || (insn_code = recog_memoized (insn)) < 0
4894	  || insn_data[insn_code].n_dups > 0)
4895	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4896      else
4897	SET_SRC (sets[i].rtl) = new;
4898
4899      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4900	{
4901	  validate_change (insn, &XEXP (dest, 1),
4902			   canon_reg (XEXP (dest, 1), insn), 1);
4903	  validate_change (insn, &XEXP (dest, 2),
4904			   canon_reg (XEXP (dest, 2), insn), 1);
4905	}
4906
4907      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4908	     || GET_CODE (dest) == ZERO_EXTRACT
4909	     || GET_CODE (dest) == SIGN_EXTRACT)
4910	dest = XEXP (dest, 0);
4911
4912      if (GET_CODE (dest) == MEM)
4913	canon_reg (dest, insn);
4914    }
4915
4916  /* Now that we have done all the replacements, we can apply the change
4917     group and see if they all work.  Note that this will cause some
4918     canonicalizations that would have worked individually not to be applied
4919     because some other canonicalization didn't work, but this should not
4920     occur often.
4921
4922     The result of apply_change_group can be ignored; see canon_reg.  */
4923
4924  apply_change_group ();
4925
4926  /* Set sets[i].src_elt to the class each source belongs to.
4927     Detect assignments from or to volatile things
4928     and set sets[i] to zero so they will be ignored
4929     in the rest of this function.
4930
4931     Nothing in this loop changes the hash table or the register chains.  */
4932
4933  for (i = 0; i < n_sets; i++)
4934    {
4935      rtx src, dest;
4936      rtx src_folded;
4937      struct table_elt *elt = 0, *p;
4938      enum machine_mode mode;
4939      rtx src_eqv_here;
4940      rtx src_const = 0;
4941      rtx src_related = 0;
4942      struct table_elt *src_const_elt = 0;
4943      int src_cost = MAX_COST;
4944      int src_eqv_cost = MAX_COST;
4945      int src_folded_cost = MAX_COST;
4946      int src_related_cost = MAX_COST;
4947      int src_elt_cost = MAX_COST;
4948      int src_regcost = MAX_COST;
4949      int src_eqv_regcost = MAX_COST;
4950      int src_folded_regcost = MAX_COST;
4951      int src_related_regcost = MAX_COST;
4952      int src_elt_regcost = MAX_COST;
4953      /* Set nonzero if we need to call force_const_mem on the
4954	 contents of src_folded before using it.  */
4955      int src_folded_force_flag = 0;
4956
4957      dest = SET_DEST (sets[i].rtl);
4958      src = SET_SRC (sets[i].rtl);
4959
4960      /* If SRC is a constant that has no machine mode,
4961	 hash it with the destination's machine mode.
4962	 This way we can keep different modes separate.  */
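      /* Illustrative example (register number invented): for
	 (set (reg:SI 100) (const_int 5)), the VOIDmode (const_int 5) is
	 hashed in SImode, so it cannot be confused with an equivalence
	 recorded for (const_int 5) used in, say, DImode.  */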
4963
4964      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4965      sets[i].mode = mode;
4966
4967      if (src_eqv)
4968	{
4969	  enum machine_mode eqvmode = mode;
4970	  if (GET_CODE (dest) == STRICT_LOW_PART)
4971	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4972	  do_not_record = 0;
4973	  hash_arg_in_memory = 0;
4974	  src_eqv_hash = HASH (src_eqv, eqvmode);
4975
4976	  /* Find the equivalence class for the equivalent expression.  */
4977
4978	  if (!do_not_record)
4979	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4980
4981	  src_eqv_volatile = do_not_record;
4982	  src_eqv_in_memory = hash_arg_in_memory;
4983	}
4984
4985      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4986	 value of the INNER register, not the destination.  So it is not
4987	 a valid substitution for the source.  But save it for later.  */
4988      if (GET_CODE (dest) == STRICT_LOW_PART)
4989	src_eqv_here = 0;
4990      else
4991	src_eqv_here = src_eqv;
4992
4993      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
4994	 simplified result, which may not necessarily be valid.  */
4995      src_folded = fold_rtx (src, insn);
4996
4997#if 0
4998      /* ??? This caused bad code to be generated for the m68k port with -O2.
4999	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5000	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5001	 At the end we will add src and src_const to the same equivalence
5002	 class.  We now have 3 and -1 on the same equivalence class.  This
5003	 causes later instructions to be mis-optimized.  */
5004      /* If storing a constant in a bitfield, pre-truncate the constant
5005	 so we will be able to record it later.  */
5006      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5007	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5008	{
5009	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5010
5011	  if (GET_CODE (src) == CONST_INT
5012	      && GET_CODE (width) == CONST_INT
5013	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5014	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5015	    src_folded
5016	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5017					  << INTVAL (width)) - 1));
5018	}
5019#endif
5020
5021      /* Compute SRC's hash code, and also notice if it
5022	 should not be recorded at all.  In that case,
5023	 prevent any further processing of this assignment.  */
5024      do_not_record = 0;
5025      hash_arg_in_memory = 0;
5026
5027      sets[i].src = src;
5028      sets[i].src_hash = HASH (src, mode);
5029      sets[i].src_volatile = do_not_record;
5030      sets[i].src_in_memory = hash_arg_in_memory;
5031
5032      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5033	 a pseudo, do not record SRC.  Using SRC as a replacement for
5034	 anything else will be incorrect in that situation.  Note that
5035	 this usually occurs only for stack slots, in which case all the
5036	 RTL would be referring to SRC, so we don't lose any optimization
5037	 opportunities by not having SRC in the hash table.  */
5038
5039      if (GET_CODE (src) == MEM
5040	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5041	  && GET_CODE (dest) == REG
5042	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5043	sets[i].src_volatile = 1;
5044
5045#if 0
5046      /* It is no longer clear why we used to do this, but it doesn't
5047	 appear to still be needed.  So let's try without it since this
5048	 code hurts cse'ing widened ops.  */
5049      /* If source is a perverse subreg (such as QI treated as an SI),
5050	 treat it as volatile.  It may do the work of an SI in one context
5051	 where the extra bits are not being used, but cannot replace an SI
5052	 in general.  */
5053      if (GET_CODE (src) == SUBREG
5054	  && (GET_MODE_SIZE (GET_MODE (src))
5055	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5056	sets[i].src_volatile = 1;
5057#endif
5058
5059      /* Locate all possible equivalent forms for SRC.  Try to replace
5060         SRC in the insn with each cheaper equivalent.
5061
5062         We have the following types of equivalents: SRC itself, a folded
5063         version, a value given in a REG_EQUAL note, or a value related
5064	 to a constant.
5065
5066         Each of these equivalents may be part of an additional class
5067         of equivalents (if more than one is in the table, they must be in
5068         the same class; we check for this).
5069
5070	 If the source is volatile, we don't do any table lookups.
5071
5072         We note any constant equivalent for possible later use in a
5073         REG_NOTE.  */
5074
5075      if (!sets[i].src_volatile)
5076	elt = lookup (src, sets[i].src_hash, mode);
5077
5078      sets[i].src_elt = elt;
5079
5080      if (elt && src_eqv_here && src_eqv_elt)
5081	{
5082	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5083	    {
5084	      /* The REG_EQUAL is indicating that two formerly distinct
5085		 classes are now equivalent.  So merge them.  */
5086	      merge_equiv_classes (elt, src_eqv_elt);
5087	      src_eqv_hash = HASH (src_eqv, elt->mode);
5088	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5089	    }
5090
5091	  src_eqv_here = 0;
5092	}
5093
5094      else if (src_eqv_elt)
5095	elt = src_eqv_elt;
5096
5097      /* Try to find a constant somewhere and record it in `src_const'.
5098	 Record its table element, if any, in `src_const_elt'.  Look in
5099	 any known equivalences first.  (If the constant is not in the
5100	 table, also set `sets[i].src_const_hash').  */
5101      if (elt)
5102	for (p = elt->first_same_value; p; p = p->next_same_value)
5103	  if (p->is_const)
5104	    {
5105	      src_const = p->exp;
5106	      src_const_elt = elt;
5107	      break;
5108	    }
5109
5110      if (src_const == 0
5111	  && (CONSTANT_P (src_folded)
5112	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5113		 "constant" here so we will record it. This allows us
5114		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5115	      || (GET_CODE (src_folded) == MINUS
5116		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5117		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5118	src_const = src_folded, src_const_elt = elt;
5119      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5120	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5121
5122      /* If we don't know if the constant is in the table, get its
5123	 hash code and look it up.  */
5124      if (src_const && src_const_elt == 0)
5125	{
5126	  sets[i].src_const_hash = HASH (src_const, mode);
5127	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5128	}
5129
5130      sets[i].src_const = src_const;
5131      sets[i].src_const_elt = src_const_elt;
5132
5133      /* If the constant and our source are both in the table, mark them as
5134	 equivalent.  Otherwise, if a constant is in the table but the source
5135	 isn't, set ELT to it.  */
5136      if (src_const_elt && elt
5137	  && src_const_elt->first_same_value != elt->first_same_value)
5138	merge_equiv_classes (elt, src_const_elt);
5139      else if (src_const_elt && elt == 0)
5140	elt = src_const_elt;
5141
5142      /* See if there is a register linearly related to a constant
5143         equivalent of SRC.  */
5144      if (src_const
5145	  && (GET_CODE (src_const) == CONST
5146	      || (src_const_elt && src_const_elt->related_value != 0)))
5147	{
5148	  src_related = use_related_value (src_const, src_const_elt);
5149	  if (src_related)
5150	    {
5151	      struct table_elt *src_related_elt
5152		= lookup (src_related, HASH (src_related, mode), mode);
5153	      if (src_related_elt && elt)
5154		{
5155		  if (elt->first_same_value
5156		      != src_related_elt->first_same_value)
5157		    /* This can occur when we previously saw a CONST
5158		       involving a SYMBOL_REF and then see the SYMBOL_REF
5159		       twice.  Merge the involved classes.  */
5160		    merge_equiv_classes (elt, src_related_elt);
5161
5162		  src_related = 0;
5163		  src_related_elt = 0;
5164		}
5165	      else if (src_related_elt && elt == 0)
5166		elt = src_related_elt;
5167	    }
5168	}
5169
5170      /* See if we have a CONST_INT that is already in a register in a
5171	 wider mode.  */
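      /* A hypothetical sketch: if (reg:SI 100) is already known to hold
	 (const_int 7) and this set needs (const_int 7) in QImode, the
	 loop below may find the SImode class and supply
	 (subreg:QI (reg:SI 100) 0) as SRC_RELATED.  */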
5172
5173      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5174	  && GET_MODE_CLASS (mode) == MODE_INT
5175	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5176	{
5177	  enum machine_mode wider_mode;
5178
5179	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5180	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5181	       && src_related == 0;
5182	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5183	    {
5184	      struct table_elt *const_elt
5185		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5186
5187	      if (const_elt == 0)
5188		continue;
5189
5190	      for (const_elt = const_elt->first_same_value;
5191		   const_elt; const_elt = const_elt->next_same_value)
5192		if (GET_CODE (const_elt->exp) == REG)
5193		  {
5194		    src_related = gen_lowpart_if_possible (mode,
5195							   const_elt->exp);
5196		    break;
5197		  }
5198	    }
5199	}
5200
5201      /* Another possibility is that we have an AND with a constant in
5202	 a mode narrower than a word.  If so, it might have been generated
5203	 as part of an "if" which would narrow the AND.  If we already
5204	 have done the AND in a wider mode, we can use a SUBREG of that
5205	 value.  */
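      /* Sketch of the case handled below (register numbers invented):
	 if the table already records
	     (set (reg:SI 100) (and:SI (reg:SI 101) (const_int 255)))
	 then for SRC (and:HI (subreg:HI (reg:SI 101) 0) (const_int 255))
	 we may be able to use (subreg:HI (reg:SI 100) 0) as
	 SRC_RELATED.  */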
5206
5207      if (flag_expensive_optimizations && ! src_related
5208	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5209	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5210	{
5211	  enum machine_mode tmode;
5212	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5213
5214	  for (tmode = GET_MODE_WIDER_MODE (mode);
5215	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5216	       tmode = GET_MODE_WIDER_MODE (tmode))
5217	    {
5218	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5219	      struct table_elt *larger_elt;
5220
5221	      if (inner)
5222		{
5223		  PUT_MODE (new_and, tmode);
5224		  XEXP (new_and, 0) = inner;
5225		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5226		  if (larger_elt == 0)
5227		    continue;
5228
5229		  for (larger_elt = larger_elt->first_same_value;
5230		       larger_elt; larger_elt = larger_elt->next_same_value)
5231		    if (GET_CODE (larger_elt->exp) == REG)
5232		      {
5233			src_related
5234			  = gen_lowpart_if_possible (mode, larger_elt->exp);
5235			break;
5236		      }
5237
5238		  if (src_related)
5239		    break;
5240		}
5241	    }
5242	}
5243
5244#ifdef LOAD_EXTEND_OP
5245      /* See if a MEM has already been loaded with a widening operation;
5246	 if it has, we can use a subreg of that.  Many CISC machines
5247	 also have such operations, but this is only likely to be
5248	 beneficial on these machines.  */
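      /* For instance, on a hypothetical target where
	 LOAD_EXTEND_OP (QImode) is ZERO_EXTEND: if
	     (set (reg:SI 100) (zero_extend:SI (mem:QI addr)))
	 was seen earlier, a later QImode read of the same MEM may be
	 replaced by (subreg:QI (reg:SI 100) 0).  */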
5249
5250      if (flag_expensive_optimizations && src_related == 0
5251	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5252	  && GET_MODE_CLASS (mode) == MODE_INT
5253	  && GET_CODE (src) == MEM && ! do_not_record
5254	  && LOAD_EXTEND_OP (mode) != NIL)
5255	{
5256	  enum machine_mode tmode;
5257
5258	  /* Set what we are trying to extend and the operation it might
5259	     have been extended with.  */
5260	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5261	  XEXP (memory_extend_rtx, 0) = src;
5262
5263	  for (tmode = GET_MODE_WIDER_MODE (mode);
5264	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5265	       tmode = GET_MODE_WIDER_MODE (tmode))
5266	    {
5267	      struct table_elt *larger_elt;
5268
5269	      PUT_MODE (memory_extend_rtx, tmode);
5270	      larger_elt = lookup (memory_extend_rtx,
5271				   HASH (memory_extend_rtx, tmode), tmode);
5272	      if (larger_elt == 0)
5273		continue;
5274
5275	      for (larger_elt = larger_elt->first_same_value;
5276		   larger_elt; larger_elt = larger_elt->next_same_value)
5277		if (GET_CODE (larger_elt->exp) == REG)
5278		  {
5279		    src_related = gen_lowpart_if_possible (mode,
5280							   larger_elt->exp);
5281		    break;
5282		  }
5283
5284	      if (src_related)
5285		break;
5286	    }
5287	}
5288#endif /* LOAD_EXTEND_OP */
5289
5290      if (src == src_folded)
5291	src_folded = 0;
5292
5293      /* At this point, ELT, if nonzero, points to a class of expressions
5294         equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
5295	 and SRC_RELATED, if nonzero, each contain additional equivalent
5296	 expressions.  Prune these latter expressions by deleting expressions
5297	 already in the equivalence class.
5298
5299	 Check for an equivalent identical to the destination.  If found,
5300	 this is the preferred equivalent since it will likely lead to
5301	 elimination of the insn.  Indicate this by placing it in
5302	 `src_related'.  */
5303
5304      if (elt)
5305	elt = elt->first_same_value;
5306      for (p = elt; p; p = p->next_same_value)
5307	{
5308	  enum rtx_code code = GET_CODE (p->exp);
5309
5310	  /* If the expression is not valid, ignore it.  Then we do not
5311	     have to check for validity below.  In most cases, we can use
5312	     `rtx_equal_p', since canonicalization has already been done.  */
5313	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5314	    continue;
5315
5316	  /* Also skip paradoxical subregs, unless that's what we're
5317	     looking for.  */
5318	  if (code == SUBREG
5319	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5320		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5321	      && ! (src != 0
5322		    && GET_CODE (src) == SUBREG
5323		    && GET_MODE (src) == GET_MODE (p->exp)
5324		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5325			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5326	    continue;
5327
5328	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5329	    src = 0;
5330	  else if (src_folded && GET_CODE (src_folded) == code
5331		   && rtx_equal_p (src_folded, p->exp))
5332	    src_folded = 0;
5333	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5334		   && rtx_equal_p (src_eqv_here, p->exp))
5335	    src_eqv_here = 0;
5336	  else if (src_related && GET_CODE (src_related) == code
5337		   && rtx_equal_p (src_related, p->exp))
5338	    src_related = 0;
5339
5340	  /* If this is the same as the destination of the insn, we want
5341	     to prefer it.  Copy it to src_related.  The code below will
5342	     then give it a negative cost.  */
5343	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5344	    src_related = dest;
5345	}
5346
5347      /* Find the cheapest valid equivalent, trying all the available
5348         possibilities.  Prefer items not in the hash table to ones
5349         that are when they are of equal cost.  Note that we can never
5350         worsen an insn as the current contents will also succeed.
5351	 If we find an equivalent identical to the destination, use it as best,
5352	 since this insn will probably be eliminated in that case.  */
5353      if (src)
5354	{
5355	  if (rtx_equal_p (src, dest))
5356	    src_cost = src_regcost = -1;
5357	  else
5358	    {
5359	      src_cost = COST (src);
5360	      src_regcost = approx_reg_cost (src);
5361	    }
5362	}
5363
5364      if (src_eqv_here)
5365	{
5366	  if (rtx_equal_p (src_eqv_here, dest))
5367	    src_eqv_cost = src_eqv_regcost = -1;
5368	  else
5369	    {
5370	      src_eqv_cost = COST (src_eqv_here);
5371	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5372	    }
5373	}
5374
5375      if (src_folded)
5376	{
5377	  if (rtx_equal_p (src_folded, dest))
5378	    src_folded_cost = src_folded_regcost = -1;
5379	  else
5380	    {
5381	      src_folded_cost = COST (src_folded);
5382	      src_folded_regcost = approx_reg_cost (src_folded);
5383	    }
5384	}
5385
5386      if (src_related)
5387	{
5388	  if (rtx_equal_p (src_related, dest))
5389	    src_related_cost = src_related_regcost = -1;
5390	  else
5391	    {
5392	      src_related_cost = COST (src_related);
5393	      src_related_regcost = approx_reg_cost (src_related);
5394	    }
5395	}
5396
5397      /* If this was an indirect jump insn, a known label will really be
5398	 cheaper even though it looks more expensive.  */
5399      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5400	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5401
5402      /* Terminate the loop when a replacement is made.  It must terminate
5403         since the current contents will be tested and will always be valid.  */
5404      while (1)
5405	{
5406	  rtx trial;
5407
5408	  /* Skip invalid entries.  */
5409	  while (elt && GET_CODE (elt->exp) != REG
5410		 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5411	    elt = elt->next_same_value;
5412
5413	  /* A paradoxical subreg would be bad here: it'll be the right
5414	     size, but later may be adjusted so that the upper bits aren't
5415	     what we want.  So reject it.  */
5416	  if (elt != 0
5417	      && GET_CODE (elt->exp) == SUBREG
5418	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5419		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5420	      /* It is okay, though, if the rtx we're trying to match
5421		 will ignore any of the bits we can't predict.  */
5422	      && ! (src != 0
5423		    && GET_CODE (src) == SUBREG
5424		    && GET_MODE (src) == GET_MODE (elt->exp)
5425		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5426			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5427	    {
5428	      elt = elt->next_same_value;
5429	      continue;
5430	    }
5431
5432	  if (elt)
5433	    {
5434	      src_elt_cost = elt->cost;
5435	      src_elt_regcost = elt->regcost;
5436	    }
5437
5438	  /* Find the cheapest and skip it for the next time.  For items
5439	     of equal cost, use this order:
5440	     src_folded, src, src_eqv, src_related and hash table entry.  */
5441	  if (src_folded
5442	      && preferrable (src_folded_cost, src_folded_regcost,
5443			      src_cost, src_regcost) <= 0
5444	      && preferrable (src_folded_cost, src_folded_regcost,
5445			      src_eqv_cost, src_eqv_regcost) <= 0
5446	      && preferrable (src_folded_cost, src_folded_regcost,
5447			      src_related_cost, src_related_regcost) <= 0
5448	      && preferrable (src_folded_cost, src_folded_regcost,
5449			      src_elt_cost, src_elt_regcost) <= 0)
5450	    {
5451	      trial = src_folded, src_folded_cost = MAX_COST;
5452	      if (src_folded_force_flag)
5453		{
5454		  rtx forced = force_const_mem (mode, trial);
5455		  if (forced)
5456		    trial = forced;
5457		}
5458	    }
5459	  else if (src
5460		   && preferrable (src_cost, src_regcost,
5461				   src_eqv_cost, src_eqv_regcost) <= 0
5462		   && preferrable (src_cost, src_regcost,
5463				   src_related_cost, src_related_regcost) <= 0
5464		   && preferrable (src_cost, src_regcost,
5465				   src_elt_cost, src_elt_regcost) <= 0)
5466	    trial = src, src_cost = MAX_COST;
5467	  else if (src_eqv_here
5468		   && preferrable (src_eqv_cost, src_eqv_regcost,
5469				   src_related_cost, src_related_regcost) <= 0
5470		   && preferrable (src_eqv_cost, src_eqv_regcost,
5471				   src_elt_cost, src_elt_regcost) <= 0)
5472	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5473	  else if (src_related
5474		   && preferrable (src_related_cost, src_related_regcost,
5475				   src_elt_cost, src_elt_regcost) <= 0)
5476	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5477	  else
5478	    {
5479	      trial = copy_rtx (elt->exp);
5480	      elt = elt->next_same_value;
5481	      src_elt_cost = MAX_COST;
5482	    }
5483
5484	  /* We don't normally have an insn matching (set (pc) (pc)), so
5485	     check for this separately here.  We will delete such an
5486	     insn below.
5487
5488	     For other cases such as a table jump or conditional jump
5489	     where we know the ultimate target, go ahead and replace the
5490	     operand.  While that may not make a valid insn, we will
5491	     reemit the jump below (and also insert any necessary
5492	     barriers).  */
5493	  if (n_sets == 1 && dest == pc_rtx
5494	      && (trial == pc_rtx
5495		  || (GET_CODE (trial) == LABEL_REF
5496		      && ! condjump_p (insn))))
5497	    {
5498	      SET_SRC (sets[i].rtl) = trial;
5499	      cse_jumps_altered = 1;
5500	      break;
5501	    }
5502
5503	  /* Look for a substitution that makes a valid insn.  */
5504	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5505	    {
5506	      rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5507
5508	      /* If we just made a substitution inside a libcall, then we
5509		 need to make the same substitution in any notes attached
5510		 to the RETVAL insn.  */
5511	      if (libcall_insn
5512		  && (GET_CODE (sets[i].orig_src) == REG
5513		      || GET_CODE (sets[i].orig_src) == SUBREG
5514		      || GET_CODE (sets[i].orig_src) == MEM))
5515		simplify_replace_rtx (REG_NOTES (libcall_insn),
5516				      sets[i].orig_src, copy_rtx (new));
5517
5518	      /* The result of apply_change_group can be ignored; see
5519		 canon_reg.  */
5520
5521	      validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5522	      apply_change_group ();
5523	      break;
5524	    }
5525
5526	  /* If we previously found constant pool entries for
5527	     constants and this is a constant, try making a
5528	     pool entry.  Put it in src_folded unless we have already done
5529	     this, since that is where it likely came from.  */
5530
5531	  else if (constant_pool_entries_cost
5532		   && CONSTANT_P (trial)
5533		   /* Reject cases that will abort in decode_rtx_const.
5534		      On the alpha when simplifying a switch, we get
5535		      (const (truncate (minus (label_ref) (label_ref)))).  */
5536		   && ! (GET_CODE (trial) == CONST
5537			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5538		   /* Likewise on IA-64, except without the truncate.  */
5539		   && ! (GET_CODE (trial) == CONST
5540			 && GET_CODE (XEXP (trial, 0)) == MINUS
5541			 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5542			 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5543		   && (src_folded == 0
5544		       || (GET_CODE (src_folded) != MEM
5545			   && ! src_folded_force_flag))
5546		   && GET_MODE_CLASS (mode) != MODE_CC
5547		   && mode != VOIDmode)
5548	    {
5549	      src_folded_force_flag = 1;
5550	      src_folded = trial;
5551	      src_folded_cost = constant_pool_entries_cost;
5552	      src_folded_regcost = constant_pool_entries_regcost;
5553	    }
5554	}
5555
5556      src = SET_SRC (sets[i].rtl);
5557
5558      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5559	 However, there is an important exception:  If both are registers
5560	 that are not the head of their equivalence class, replace SET_SRC
5561	 with the head of the class.  If we do not do this, we will have
5562	 both registers live over a portion of the basic block.  This way,
5563	 their lifetimes will likely abut instead of overlapping.  */
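      /* A made-up example: if (reg 100) and (reg 101) are in the same
	 class with (reg 100) as its head, and this insn has become
	 (set (reg 101) (reg 101)), we rewrite it as
	 (set (reg 101) (reg 100)) so the two lifetimes tend to abut.  */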
5564      if (GET_CODE (dest) == REG
5565	  && REGNO_QTY_VALID_P (REGNO (dest)))
5566	{
5567	  int dest_q = REG_QTY (REGNO (dest));
5568	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5569
5570	  if (dest_ent->mode == GET_MODE (dest)
5571	      && dest_ent->first_reg != REGNO (dest)
5572	      && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5573	      /* Don't do this if the original insn had a hard reg as
5574		 SET_SRC or SET_DEST.  */
5575	      && (GET_CODE (sets[i].src) != REG
5576		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5577	      && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5578	    /* We can't call canon_reg here because it won't do anything if
5579	       SRC is a hard register.  */
5580	    {
5581	      int src_q = REG_QTY (REGNO (src));
5582	      struct qty_table_elem *src_ent = &qty_table[src_q];
5583	      int first = src_ent->first_reg;
5584	      rtx new_src
5585		= (first >= FIRST_PSEUDO_REGISTER
5586		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5587
5588	      /* We must use validate-change even for this, because this
5589		 might be a special no-op instruction, suitable only to
5590		 tag notes onto.  */
5591	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5592		{
5593		  src = new_src;
5594		  /* If we had a constant that is cheaper than what we are now
5595		     setting SRC to, use that constant.  We ignored it when we
5596		     thought we could make this into a no-op.  */
5597		  if (src_const && COST (src_const) < COST (src)
5598		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5599					  src_const, 0))
5600		    src = src_const;
5601		}
5602	    }
5603	}
5604
5605      /* If we made a change, recompute SRC values.  */
5606      if (src != sets[i].src)
5607	{
5608	  cse_altered = 1;
5609	  do_not_record = 0;
5610	  hash_arg_in_memory = 0;
5611	  sets[i].src = src;
5612	  sets[i].src_hash = HASH (src, mode);
5613	  sets[i].src_volatile = do_not_record;
5614	  sets[i].src_in_memory = hash_arg_in_memory;
5615	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5616	}
5617
5618      /* If this is a single SET, we are setting a register, and we have an
5619	 equivalent constant, we want to add a REG_NOTE.  We don't want
5620	 to write a REG_EQUAL note for a constant pseudo since verifying that
5621	 that pseudo hasn't been eliminated is a pain.  Such a note also
5622	 won't help anything.
5623
5624	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5625	 which can be created for a reference to a compile time computable
5626	 entry in a jump table.  */
5627
5628      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5629	  && GET_CODE (src_const) != REG
5630	  && ! (GET_CODE (src_const) == CONST
5631		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5632		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5633		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5634	{
5635	  /* We only want a REG_EQUAL note if src_const != src.  */
5636	  if (! rtx_equal_p (src, src_const))
5637	    {
5638	      /* Make sure that the rtx is not shared.  */
5639	      src_const = copy_rtx (src_const);
5640
5641	      /* Record the actual constant value in a REG_EQUAL note,
5642		 making a new one if one does not already exist.  */
5643	      set_unique_reg_note (insn, REG_EQUAL, src_const);
5644	    }
5645	}
5646
5647      /* Now deal with the destination.  */
5648      do_not_record = 0;
5649
5650      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5651	 to the MEM or REG within it.  */
5652      while (GET_CODE (dest) == SIGN_EXTRACT
5653	     || GET_CODE (dest) == ZERO_EXTRACT
5654	     || GET_CODE (dest) == SUBREG
5655	     || GET_CODE (dest) == STRICT_LOW_PART)
5656	dest = XEXP (dest, 0);
5657
5658      sets[i].inner_dest = dest;
5659
5660      if (GET_CODE (dest) == MEM)
5661	{
5662#ifdef PUSH_ROUNDING
5663	  /* Stack pushes invalidate the stack pointer.  */
5664	  rtx addr = XEXP (dest, 0);
5665	  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5666	      && XEXP (addr, 0) == stack_pointer_rtx)
5667	    invalidate (stack_pointer_rtx, Pmode);
5668#endif
5669	  dest = fold_rtx (dest, insn);
5670	}
5671
5672      /* Compute the hash code of the destination now,
5673	 before the effects of this instruction are recorded,
5674	 since the register values used in the address computation
5675	 are those before this instruction.  */
5676      sets[i].dest_hash = HASH (dest, mode);
5677
5678      /* Don't enter a bit-field in the hash table
5679	 because the value in it after the store
5680	 may not equal what was stored, due to truncation.  */
5681
5682      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5683	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5684	{
5685	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5686
5687	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5688	      && GET_CODE (width) == CONST_INT
5689	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5690	      && ! (INTVAL (src_const)
5691		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5692	    /* Exception: if the value is constant,
5693	       and it won't be truncated, record it.  */
5694	    ;
5695	  else
5696	    {
5697	      /* This is chosen so that the destination will be invalidated
5698		 but no new value will be recorded.
5699		 We must invalidate because sometimes constant
5700		 values can be recorded for bitfields.  */
5701	      sets[i].src_elt = 0;
5702	      sets[i].src_volatile = 1;
5703	      src_eqv = 0;
5704	      src_eqv_elt = 0;
5705	    }
5706	}
5707
5708      /* If there is only one set in a JUMP_INSN and it is now a no-op,
5709	 we can delete the insn.  */
5710      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5711	{
5712	  /* One less use of the label this insn used to jump to.  */
5713	  delete_insn (insn);
5714	  cse_jumps_altered = 1;
5715	  /* No more processing for this set.  */
5716	  sets[i].rtl = 0;
5717	}
5718
5719      /* If this SET is now setting PC to a label, we know it used to
5720	 be a conditional or computed branch.  */
5721      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5722	{
5723	  /* Now emit a BARRIER after the unconditional jump.  */
5724	  if (NEXT_INSN (insn) == 0
5725	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5726	    emit_barrier_after (insn);
5727
5728	  /* We reemit the jump in as many cases as possible just in
5729	     case the form of an unconditional jump is significantly
5730	     different from a computed jump or conditional jump.
5731
5732	     If this insn has multiple sets, then reemitting the
5733	     jump is nontrivial.  So instead we just force rerecognition
5734	     and hope for the best.  */
5735	  if (n_sets == 1)
5736	    {
5737	      rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5738
5739	      JUMP_LABEL (new) = XEXP (src, 0);
5740	      LABEL_NUSES (XEXP (src, 0))++;
5741	      delete_insn (insn);
5742	      insn = new;
5743
5744	      /* Now emit a BARRIER after the unconditional jump.  */
5745	      if (NEXT_INSN (insn) == 0
5746		  || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5747		emit_barrier_after (insn);
5748	    }
5749	  else
5750	    INSN_CODE (insn) = -1;
5751
5752	  never_reached_warning (insn, NULL);
5753
5754	  /* Do not bother deleting any unreachable code,
5755	     let jump/flow do that.  */
5756
5757	  cse_jumps_altered = 1;
5758	  sets[i].rtl = 0;
5759	}
5760
5761      /* If destination is volatile, invalidate it and then do no further
5762	 processing for this assignment.  */
5763
5764      else if (do_not_record)
5765	{
5766	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5767	    invalidate (dest, VOIDmode);
5768	  else if (GET_CODE (dest) == MEM)
5769	    {
5770	      /* Outgoing arguments for a libcall don't
5771		 affect any recorded expressions.  */
5772	      if (! libcall_insn || insn == libcall_insn)
5773		invalidate (dest, VOIDmode);
5774	    }
5775	  else if (GET_CODE (dest) == STRICT_LOW_PART
5776		   || GET_CODE (dest) == ZERO_EXTRACT)
5777	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5778	  sets[i].rtl = 0;
5779	}
5780
5781      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5782	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5783
5784#ifdef HAVE_cc0
5785      /* If setting CC0, record what it was set to, or a constant, if it
5786	 is equivalent to a constant.  If it is being set to a floating-point
5787	 value, make a COMPARE with the appropriate constant of 0.  If we
5788	 don't do this, later code can interpret this as a test against
5789	 const0_rtx, which can cause problems if we try to put it into an
5790	 insn as a floating-point operand.  */
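      /* Illustration (hypothetical): for (set (cc0) (reg:SF 100)) we
	 record (compare (reg:SF 100) (const_double:SF 0.0)) rather than
	 the bare register, so later code cannot mistake it for a test
	 against const0_rtx.  */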
5791      if (dest == cc0_rtx)
5792	{
5793	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5794	  this_insn_cc0_mode = mode;
5795	  if (FLOAT_MODE_P (mode))
5796	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5797					     CONST0_RTX (mode));
5798	}
5799#endif
5800    }
5801
5802  /* Now enter all non-volatile source expressions in the hash table
5803     if they are not already present.
5804     Record their equivalence classes in src_elt.
5805     This way we can insert the corresponding destinations into
5806     the same classes even if the actual sources are no longer in them
5807     (having been invalidated).  */
5808
5809  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5810      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5811    {
5812      struct table_elt *elt;
5813      struct table_elt *classp = sets[0].src_elt;
5814      rtx dest = SET_DEST (sets[0].rtl);
5815      enum machine_mode eqvmode = GET_MODE (dest);
5816
5817      if (GET_CODE (dest) == STRICT_LOW_PART)
5818	{
5819	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5820	  classp = 0;
5821	}
5822      if (insert_regs (src_eqv, classp, 0))
5823	{
5824	  rehash_using_reg (src_eqv);
5825	  src_eqv_hash = HASH (src_eqv, eqvmode);
5826	}
5827      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5828      elt->in_memory = src_eqv_in_memory;
5829      src_eqv_elt = elt;
5830
5831      /* Check to see if src_eqv_elt is the same as a set source which
5832	 does not yet have an elt, and if so set the elt of the set source
5833	 to src_eqv_elt.  */
5834      for (i = 0; i < n_sets; i++)
5835	if (sets[i].rtl && sets[i].src_elt == 0
5836	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5837	  sets[i].src_elt = src_eqv_elt;
5838    }
5839
5840  for (i = 0; i < n_sets; i++)
5841    if (sets[i].rtl && ! sets[i].src_volatile
5842	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5843      {
5844	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5845	  {
5846	    /* REG_EQUAL in setting a STRICT_LOW_PART
5847	       gives an equivalent for the entire destination register,
5848	       not just for the subreg being stored in now.
5849	       This is a more interesting equivalence, so we arrange later
5850	       to treat the entire reg as the destination.  */
5851	    sets[i].src_elt = src_eqv_elt;
5852	    sets[i].src_hash = src_eqv_hash;
5853	  }
5854	else
5855	  {
5856	    /* Insert source and constant equivalent into hash table, if not
5857	       already present.  */
5858	    struct table_elt *classp = src_eqv_elt;
5859	    rtx src = sets[i].src;
5860	    rtx dest = SET_DEST (sets[i].rtl);
5861	    enum machine_mode mode
5862	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5863
5864	    /* It's possible that we have a source value known to be
5865	       constant but don't have a REG_EQUAL note on the insn.
5866	       Lack of a note will mean src_eqv_elt will be NULL.  This
5867	       can happen where we've generated a SUBREG to access a
5868	       CONST_INT that is already in a register in a wider mode.
5869	       Ensure that the source expression is put in the proper
5870	       constant class.  */
5871	    if (!classp)
5872	      classp = sets[i].src_const_elt;
5873
5874	    if (sets[i].src_elt == 0)
5875	      {
5876		/* Don't put a hard register source into the table if this is
5877		   the last insn of a libcall.  In this case, we only need
5878		   to put src_eqv_elt in src_elt.  */
5879		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5880		  {
5881		    struct table_elt *elt;
5882
5883		    /* Note that these insert_regs calls cannot remove
5884		       any of the src_elt's, because they would have failed to
5885		       match if not still valid.  */
5886		    if (insert_regs (src, classp, 0))
5887		      {
5888			rehash_using_reg (src);
5889			sets[i].src_hash = HASH (src, mode);
5890		      }
5891		    elt = insert (src, classp, sets[i].src_hash, mode);
5892		    elt->in_memory = sets[i].src_in_memory;
5893		    sets[i].src_elt = classp = elt;
5894		  }
5895		else
5896		  sets[i].src_elt = classp;
5897	      }
5898	    if (sets[i].src_const && sets[i].src_const_elt == 0
5899		&& src != sets[i].src_const
5900		&& ! rtx_equal_p (sets[i].src_const, src))
5901	      sets[i].src_elt = insert (sets[i].src_const, classp,
5902					sets[i].src_const_hash, mode);
5903	  }
5904      }
5905    else if (sets[i].src_elt == 0)
5906      /* If we did not insert the source into the hash table (e.g., it was
5907	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5908	 so that the destination goes into that class.  */
5909      sets[i].src_elt = src_eqv_elt;
5910
5911  invalidate_from_clobbers (x);
5912
5913  /* Some registers are invalidated by subroutine calls.  Memory is
5914     invalidated by non-constant calls.  */
5915
5916  if (GET_CODE (insn) == CALL_INSN)
5917    {
5918      if (! CONST_OR_PURE_CALL_P (insn))
5919	invalidate_memory ();
5920      invalidate_for_call ();
5921    }
5922
5923  /* Now invalidate everything set by this instruction.
5924     If a SUBREG or other funny destination is being set,
5925     sets[i].rtl is still nonzero, so here we invalidate the reg
5926     a part of which is being set.  */
5927
5928  for (i = 0; i < n_sets; i++)
5929    if (sets[i].rtl)
5930      {
5931	/* We can't use the inner dest, because the mode associated with
5932	   a ZERO_EXTRACT is significant.  */
5933	rtx dest = SET_DEST (sets[i].rtl);
5934
5935	/* Needed for registers to remove the register from its
5936	   previous quantity's chain.
5937	   Needed for memory if this is a nonvarying address, unless
5938	   we have just done an invalidate_memory that covers even those.  */
5939	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5940	  invalidate (dest, VOIDmode);
5941	else if (GET_CODE (dest) == MEM)
5942	  {
5943	    /* Outgoing arguments for a libcall don't
5944	       affect any recorded expressions.  */
5945	    if (! libcall_insn || insn == libcall_insn)
5946	      invalidate (dest, VOIDmode);
5947	  }
5948	else if (GET_CODE (dest) == STRICT_LOW_PART
5949		 || GET_CODE (dest) == ZERO_EXTRACT)
5950	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5951      }
5952
5953  /* A volatile ASM invalidates everything.  */
5954  if (GET_CODE (insn) == INSN
5955      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5956      && MEM_VOLATILE_P (PATTERN (insn)))
5957    flush_hash_table ();
5958
5959  /* Make sure registers mentioned in destinations
5960     are safe for use in an expression to be inserted.
5961     This removes from the hash table
5962     any invalid entry that refers to one of these registers.
5963
5964     We don't care about the return value from mention_regs because
5965     we are going to hash the SET_DEST values unconditionally.  */
5966
5967  for (i = 0; i < n_sets; i++)
5968    {
5969      if (sets[i].rtl)
5970	{
5971	  rtx x = SET_DEST (sets[i].rtl);
5972
5973	  if (GET_CODE (x) != REG)
5974	    mention_regs (x);
5975	  else
5976	    {
5977	      /* We used to rely on all references to a register becoming
5978		 inaccessible when a register changes to a new quantity,
5979		 since that changes the hash code.  However, that is not
5980		 safe, since after HASH_SIZE new quantities we get a
5981		 hash 'collision' of a register with its own invalid
5982		 entries.  And since SUBREGs have been changed not to
5983		 change their hash code with the hash code of the register,
5984		 it wouldn't work any longer at all.  So we have to check
5985		 for any invalid references lying around now.
5986		 This code is similar to the REG case in mention_regs,
5987		 but it knows that reg_tick has been incremented, and
5988		 it leaves reg_in_table as -1.  */
5989	      unsigned int regno = REGNO (x);
5990	      unsigned int endregno
5991		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5992			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5993	      unsigned int i;
5994
5995	      for (i = regno; i < endregno; i++)
5996		{
5997		  if (REG_IN_TABLE (i) >= 0)
5998		    {
5999		      remove_invalid_refs (i);
6000		      REG_IN_TABLE (i) = -1;
6001		    }
6002		}
6003	    }
6004	}
6005    }
6006
6007  /* We may have just removed some of the src_elt's from the hash table.
6008     So replace each one with the current head of the same class.  */
6009
6010  for (i = 0; i < n_sets; i++)
6011    if (sets[i].rtl)
6012      {
6013	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6014	  /* If elt was removed, find current head of same class,
6015	     or 0 if nothing remains of that class.  */
6016	  {
6017	    struct table_elt *elt = sets[i].src_elt;
6018
6019	    while (elt && elt->prev_same_value)
6020	      elt = elt->prev_same_value;
6021
6022	    while (elt && elt->first_same_value == 0)
6023	      elt = elt->next_same_value;
6024	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6025	  }
6026      }
6027
6028  /* Now insert the destinations into their equivalence classes.  */
6029
6030  for (i = 0; i < n_sets; i++)
6031    if (sets[i].rtl)
6032      {
6033	rtx dest = SET_DEST (sets[i].rtl);
6034	rtx inner_dest = sets[i].inner_dest;
6035	struct table_elt *elt;
6036
6037	/* Don't record value if we are not supposed to risk allocating
6038	   floating-point values in registers that might be wider than
6039	   memory.  */
6040	if ((flag_float_store
6041	     && GET_CODE (dest) == MEM
6042	     && FLOAT_MODE_P (GET_MODE (dest)))
6043	    /* Don't record BLKmode values, because we don't know their
6044	       size, and can't be sure that other BLKmode values
6045	       have the same or smaller size.  */
6046	    || GET_MODE (dest) == BLKmode
6047	    /* Don't record values of destinations set inside a libcall block
6048	       since we might delete the libcall.  Things should have been set
6049	       up so we won't want to reuse such a value, but we play it safe
6050	       here.  */
6051	    || libcall_insn
6052	    /* If we didn't put a REG_EQUAL value or a source into the hash
6053	       table, there is no point in recording DEST.  */
6054	    || sets[i].src_elt == 0
6055	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6056	       or SIGN_EXTEND, don't record DEST since it can cause
6057	       some tracking to be wrong.
6058
6059	       ??? Think about this more later.  */
6060	    || (GET_CODE (dest) == SUBREG
6061		&& (GET_MODE_SIZE (GET_MODE (dest))
6062		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6063		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6064		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6065	  continue;
6066
6067	/* STRICT_LOW_PART isn't part of the value BEING set,
6068	   and neither is the SUBREG inside it.
6069	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6070	if (GET_CODE (dest) == STRICT_LOW_PART)
6071	  dest = SUBREG_REG (XEXP (dest, 0));
6072
6073	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6074	  /* Registers must also be inserted into chains for quantities.  */
6075	  if (insert_regs (dest, sets[i].src_elt, 1))
6076	    {
6077	      /* If `insert_regs' changes something, the hash code must be
6078		 recalculated.  */
6079	      rehash_using_reg (dest);
6080	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6081	    }
6082
6083	if (GET_CODE (inner_dest) == MEM
6084	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6085	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6086	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
6087	     Consider the case in which the address of the MEM is
6088	     passed to a function, which alters the MEM.  Then, if we
6089	     later use Y instead of the MEM we'll miss the update.  */
6090	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6091	else
6092	  elt = insert (dest, sets[i].src_elt,
6093			sets[i].dest_hash, GET_MODE (dest));
6094
6095	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6096			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6097			      || fixed_base_plus_p (XEXP (sets[i].inner_dest,
6098							  0))));
6099
6100	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6101	   narrower than M2, and both M1 and M2 are the same number of words,
6102	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6103	   make that equivalence as well.
6104
6105	   However, BAR may have equivalences for which gen_lowpart_if_possible
6106	   will produce a simpler value than gen_lowpart_if_possible applied to
6107	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6108	   BAR's equivalences.  If we don't get a simplified form, make
6109	   the SUBREG.  It will not be used in an equivalence, but will
6110	   cause two similar assignments to be detected.
6111
6112	   Note the loop below will find SUBREG_REG (DEST) since we have
6113	   already entered SRC and DEST of the SET in the table.  */
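	/* Sketch (register numbers invented): given
	       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
	   on a 32-bit little-endian target, the loop below also records
	   (subreg:HI (reg:SI 101) 0) as equivalent to (reg:HI 100).  */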
6114
6115	if (GET_CODE (dest) == SUBREG
6116	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6117		 / UNITS_PER_WORD)
6118		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6119	    && (GET_MODE_SIZE (GET_MODE (dest))
6120		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6121	    && sets[i].src_elt != 0)
6122	  {
6123	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6124	    struct table_elt *elt, *classp = 0;
6125
6126	    for (elt = sets[i].src_elt->first_same_value; elt;
6127		 elt = elt->next_same_value)
6128	      {
6129		rtx new_src = 0;
6130		unsigned src_hash;
6131		struct table_elt *src_elt;
6132		int byte = 0;
6133
6134		/* Ignore invalid entries.  */
6135		if (GET_CODE (elt->exp) != REG
6136		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6137		  continue;
6138
6139		/* We may have already been playing subreg games.  If the
6140		   mode is already correct for the destination, use it.  */
6141		if (GET_MODE (elt->exp) == new_mode)
6142		  new_src = elt->exp;
6143		else
6144		  {
6145		    /* Calculate the big-endian correction for the SUBREG_BYTE.
6146		       We have already checked that M1 (GET_MODE (dest))
6147		       is not narrower than M2 (new_mode).  */
6148		    if (BYTES_BIG_ENDIAN)
6149		      byte = (GET_MODE_SIZE (GET_MODE (dest))
6150			      - GET_MODE_SIZE (new_mode));
6151
6152		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6153					           GET_MODE (dest), byte);
6154		  }
6155
6156		/* The call to simplify_gen_subreg fails if the value
6157		   is VOIDmode and we can't do any simplification, e.g.
6158		   for EXPR_LISTs denoting function call results.
6159		   It is invalid to construct a SUBREG with a VOIDmode
6160		   SUBREG_REG, hence a zero new_src means we can't do
6161		   this substitution.  */
6162		if (! new_src)
6163		  continue;
6164
6165		src_hash = HASH (new_src, new_mode);
6166		src_elt = lookup (new_src, src_hash, new_mode);
6167
6168		/* Put the new source in the hash table if it isn't
6169		   there already.  */
6170		if (src_elt == 0)
6171		  {
6172		    if (insert_regs (new_src, classp, 0))
6173		      {
6174			rehash_using_reg (new_src);
6175			src_hash = HASH (new_src, new_mode);
6176		      }
6177		    src_elt = insert (new_src, classp, src_hash, new_mode);
6178		    src_elt->in_memory = elt->in_memory;
6179		  }
6180		else if (classp && classp != src_elt->first_same_value)
6181		  /* Show that two things that we've seen before are
6182		     actually the same.  */
6183		  merge_equiv_classes (src_elt, classp);
6184
6185		classp = src_elt->first_same_value;
6186		/* Ignore invalid entries.  */
6187		while (classp
6188		       && GET_CODE (classp->exp) != REG
6189		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6190		  classp = classp->next_same_value;
6191	      }
6192	  }
6193      }
6194
6195  /* Special handling for (set REG0 REG1) where REG0 is the
6196     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6197     be used in the sequel, so (if easily done) change this insn to
6198     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6199     that computed their value.  Then REG1 will become a dead store
6200     and won't cloud the situation for later optimizations.
6201
6202     Do not make this change if REG1 is a hard register, because it will
6203     then be used in the sequel and we may be changing a two-operand insn
6204     into a three-operand insn.
6205
6206     Also do not do this if we are operating on a copy of INSN.
6207
6208     Also don't do this if INSN ends a libcall; this would cause an unrelated
6209     register to be set in the middle of a libcall, and we then get bad code
6210     if the libcall is deleted.  */
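  /* A concrete sketch (pseudo-register numbers invented): if the
     previous insn is
	 (set (reg 101) (plus (reg 102) (reg 103)))
     and this insn is (set (reg 100) (reg 101)), where (reg 100) is the
     cheapest member of the class, the pair becomes
	 (set (reg 100) (plus (reg 102) (reg 103)))
	 (set (reg 101) (reg 100))
     so the second set is likely to end up as a dead store.  */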
6211
6212  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6213      && NEXT_INSN (PREV_INSN (insn)) == insn
6214      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6215      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6216      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6217    {
6218      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6219      struct qty_table_elem *src_ent = &qty_table[src_q];
6220
6221      if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6222	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6223	{
6224	  rtx prev = insn;
6225	  /* Scan for the previous nonnote insn, but stop at a basic
6226	     block boundary.  */
6227	  do
6228	    {
6229	      prev = PREV_INSN (prev);
6230	    }
6231	  while (prev && GET_CODE (prev) == NOTE
6232		 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6233
6234	  /* Do not swap the registers around if the previous instruction
6235	     attaches a REG_EQUIV note to REG1.
6236
6237	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6238	     from the pseudo that originally shadowed an incoming argument
6239	     to another register.  Some uses of REG_EQUIV might rely on it
6240	     being attached to REG1 rather than REG2.
6241
6242	     This section previously turned the REG_EQUIV into a REG_EQUAL
6243	     note.  We cannot do that because REG_EQUIV may provide an
6244	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
6245
6246	  if (prev != 0 && GET_CODE (prev) == INSN
6247	      && GET_CODE (PATTERN (prev)) == SET
6248	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6249	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6250	    {
6251	      rtx dest = SET_DEST (sets[0].rtl);
6252	      rtx src = SET_SRC (sets[0].rtl);
6253	      rtx note;
6254
6255	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6256	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6257	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6258	      apply_change_group ();
6259
6260	      /* If INSN has a REG_EQUAL note, and this note mentions
6261		 REG0, then we must delete it, because the value in
6262		 REG0 has changed.  If the note's value is REG1, we must
6263		 also delete it because that is now this insn's dest.  */
6264	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6265	      if (note != 0
6266		  && (reg_mentioned_p (dest, XEXP (note, 0))
6267		      || rtx_equal_p (src, XEXP (note, 0))))
6268		remove_note (insn, note);
6269	    }
6270	}
6271    }
6272
6273  /* If this is a conditional jump insn, record any known equivalences due to
6274     the condition being tested.  */
6275
6276  last_jump_equiv_class = 0;
6277  if (GET_CODE (insn) == JUMP_INSN
6278      && n_sets == 1 && GET_CODE (x) == SET
6279      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6280    record_jump_equiv (insn, 0);
6281
6282#ifdef HAVE_cc0
6283  /* If the previous insn set CC0 and this insn no longer references CC0,
6284     delete the previous insn.  Here we use the fact that nothing expects CC0
6285     to be valid over an insn, which is true until the final pass.  */
6286  if (prev_insn && GET_CODE (prev_insn) == INSN
6287      && (tem = single_set (prev_insn)) != 0
6288      && SET_DEST (tem) == cc0_rtx
6289      && ! reg_mentioned_p (cc0_rtx, x))
6290    delete_insn (prev_insn);
6291
6292  prev_insn_cc0 = this_insn_cc0;
6293  prev_insn_cc0_mode = this_insn_cc0_mode;
6294  prev_insn = insn;
6295#endif
6296}
6297
6298/* Remove from the hash table all expressions that reference memory.  */
6299
6300static void
6301invalidate_memory (void)
6302{
6303  int i;
6304  struct table_elt *p, *next;
6305
6306  for (i = 0; i < HASH_SIZE; i++)
6307    for (p = table[i]; p; p = next)
6308      {
6309	next = p->next_same_hash;
6310	if (p->in_memory)
6311	  remove_from_table (p, i);
6312      }
6313}
6314
6315/* If ADDR is an address that implicitly affects the stack pointer, return
6316   1 and update the register tables to show the effect.  Else, return 0.  */
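/* For example, a push generates an address such as
   (pre_dec:SI (reg:SI sp)); the PRE_DEC is an autoincrement
   (GET_RTX_CLASS 'a') whose operand is the stack pointer, so it is
   handled here.  */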
6317
6318static int
6319addr_affects_sp_p (rtx addr)
6320{
6321  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6322      && GET_CODE (XEXP (addr, 0)) == REG
6323      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6324    {
6325      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6326	{
6327	  REG_TICK (STACK_POINTER_REGNUM)++;
6328	  /* Is it possible to use a subreg of SP?  */
6329	  SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6330	}
6331
6332      /* This should be *very* rare.  */
6333      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6334	invalidate (stack_pointer_rtx, VOIDmode);
6335
6336      return 1;
6337    }
6338
6339  return 0;
6340}
6341
6342/* Perform invalidation on the basis of everything about an insn
6343   except for invalidating the actual places that are SET in it.
6344   This includes the places CLOBBERed, and anything that might
6345   alias with something that is SET or CLOBBERed.
6346
6347   X is the pattern of the insn.  */
6348
6349static void
6350invalidate_from_clobbers (rtx x)
6351{
6352  if (GET_CODE (x) == CLOBBER)
6353    {
6354      rtx ref = XEXP (x, 0);
6355      if (ref)
6356	{
6357	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6358	      || GET_CODE (ref) == MEM)
6359	    invalidate (ref, VOIDmode);
6360	  else if (GET_CODE (ref) == STRICT_LOW_PART
6361		   || GET_CODE (ref) == ZERO_EXTRACT)
6362	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6363	}
6364    }
6365  else if (GET_CODE (x) == PARALLEL)
6366    {
6367      int i;
6368      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6369	{
6370	  rtx y = XVECEXP (x, 0, i);
6371	  if (GET_CODE (y) == CLOBBER)
6372	    {
6373	      rtx ref = XEXP (y, 0);
6374	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6375		  || GET_CODE (ref) == MEM)
6376		invalidate (ref, VOIDmode);
6377	      else if (GET_CODE (ref) == STRICT_LOW_PART
6378		       || GET_CODE (ref) == ZERO_EXTRACT)
6379		invalidate (XEXP (ref, 0), GET_MODE (ref));
6380	    }
6381	}
6382    }
6383}
6384
6385/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6386   and replace any registers in them with either an equivalent constant
6387   or the canonical form of the register.  If we are inside an address,
6388   only do this if the address remains valid.
6389
6390   OBJECT is 0 except when within a MEM in which case it is the MEM.
6391
6392   Return the replacement for X.  */
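/* An illustrative case (register number invented): if a REG_EQUAL note
   contains (plus:SI (reg:SI 100) (const_int 4)) and (reg:SI 100) is
   currently known to hold (const_int 8), the REG case below substitutes
   the constant, yielding (plus:SI (const_int 8) (const_int 4)).  */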
6393
6394static rtx
6395cse_process_notes (rtx x, rtx object)
6396{
6397  enum rtx_code code = GET_CODE (x);
6398  const char *fmt = GET_RTX_FORMAT (code);
6399  int i;
6400
6401  switch (code)
6402    {
6403    case CONST_INT:
6404    case CONST:
6405    case SYMBOL_REF:
6406    case LABEL_REF:
6407    case CONST_DOUBLE:
6408    case CONST_VECTOR:
6409    case PC:
6410    case CC0:
6411    case LO_SUM:
6412      return x;
6413
6414    case MEM:
6415      validate_change (x, &XEXP (x, 0),
6416		       cse_process_notes (XEXP (x, 0), x), 0);
6417      return x;
6418
6419    case EXPR_LIST:
6420    case INSN_LIST:
6421      if (REG_NOTE_KIND (x) == REG_EQUAL)
6422	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6423      if (XEXP (x, 1))
6424	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6425      return x;
6426
6427    case SIGN_EXTEND:
6428    case ZERO_EXTEND:
6429    case SUBREG:
6430      {
6431	rtx new = cse_process_notes (XEXP (x, 0), object);
6432	/* We don't substitute VOIDmode constants into these rtx,
6433	   since they would impede folding.  */
6434	if (GET_MODE (new) != VOIDmode)
6435	  validate_change (object, &XEXP (x, 0), new, 0);
6436	return x;
6437      }
6438
6439    case REG:
6440      i = REG_QTY (REGNO (x));
6441
6442      /* Return a constant or a constant register.  */
6443      if (REGNO_QTY_VALID_P (REGNO (x)))
6444	{
6445	  struct qty_table_elem *ent = &qty_table[i];
6446
6447	  if (ent->const_rtx != NULL_RTX
6448	      && (CONSTANT_P (ent->const_rtx)
6449		  || GET_CODE (ent->const_rtx) == REG))
6450	    {
6451	      rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6452	      if (new)
6453		return new;
6454	    }
6455	}
6456
6457      /* Otherwise, canonicalize this register.  */
6458      return canon_reg (x, NULL_RTX);
6459
6460    default:
6461      break;
6462    }
6463
6464  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6465    if (fmt[i] == 'e')
6466      validate_change (object, &XEXP (x, i),
6467		       cse_process_notes (XEXP (x, i), object), 0);
6468
6469  return x;
6470}
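
/* An illustrative example (not from the original source): if the qty
   table records that (reg:SI 70) currently holds (const_int 4), a note

       (expr_list:REG_EQUAL (plus:SI (reg:SI 70) (reg:SI 71)) ...)

   is rewritten with the constant substituted for the register,

       (expr_list:REG_EQUAL (plus:SI (const_int 4) (reg:SI 71)) ...)

   any re-canonicalization or folding being left to later consumers of
   the note.  */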
6471
6472/* Find common subexpressions between the end test of a loop and the beginning
6473   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
6474
6475   Often we have a loop where an expression in the exit test is used
6476   in the body of the loop.  For example "while (*p) *q++ = *p++;".
6477   Because of the way we duplicate the loop exit test in front of the loop,
6478   however, we don't detect that common subexpression.  This will be caught
6479   when global cse is implemented, but this is quite a common case.
6480
6481   This function handles the most common cases of these common expressions.
6482   It is called after we have processed the basic block ending with the
6483   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6484   jumps to a label used only once.  */
6485
6486static void
6487cse_around_loop (rtx loop_start)
6488{
6489  rtx insn;
6490  int i;
6491  struct table_elt *p;
6492
6493  /* If the jump at the end of the loop doesn't go to the start, we don't
6494     do anything.  */
6495  for (insn = PREV_INSN (loop_start);
6496       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6497       insn = PREV_INSN (insn))
6498    ;
6499
6500  if (insn == 0
6501      || GET_CODE (insn) != NOTE
6502      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6503    return;
6504
6505  /* If the last insn of the loop (the end test) was an NE comparison,
6506     we will interpret it as an EQ comparison, since we fell through
6507     the loop.  Any equivalences resulting from that comparison are
6508     therefore not valid and must be invalidated.  */
6509  if (last_jump_equiv_class)
6510    for (p = last_jump_equiv_class->first_same_value; p;
6511	 p = p->next_same_value)
6512      {
6513	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6514	    || (GET_CODE (p->exp) == SUBREG
6515		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
6516	  invalidate (p->exp, VOIDmode);
6517	else if (GET_CODE (p->exp) == STRICT_LOW_PART
6518		 || GET_CODE (p->exp) == ZERO_EXTRACT)
6519	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6520      }
6521
6522  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6523     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6524
6525     The only thing we do with SET_DEST is invalidate entries, so we
6526     can safely process each SET in order.  It is slightly less efficient
6527     to do so, but we only want to handle the most common cases.
6528
6529     The gen_move_insn call in cse_set_around_loop may create new pseudos.
6530     These pseudos won't have valid entries in any of the tables indexed
6531     by register number, such as reg_qty.  We avoid out-of-range array
6532     accesses by not processing any instructions created after cse started.  */
6533
6534  for (insn = NEXT_INSN (loop_start);
6535       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6536       && INSN_UID (insn) < max_insn_uid
6537       && ! (GET_CODE (insn) == NOTE
6538	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6539       insn = NEXT_INSN (insn))
6540    {
6541      if (INSN_P (insn)
6542	  && (GET_CODE (PATTERN (insn)) == SET
6543	      || GET_CODE (PATTERN (insn)) == CLOBBER))
6544	cse_set_around_loop (PATTERN (insn), insn, loop_start);
6545      else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6546	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6547	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6548	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6549	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6550				 loop_start);
6551    }
6552}
6553
6554/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6555   since they are done elsewhere.  This function is called via note_stores.  */
6556
6557static void
6558invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6559{
6560  enum rtx_code code = GET_CODE (dest);
6561
6562  if (code == MEM
6563      && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6564      /* There are times when an address can appear varying and be a PLUS
6565	 during this scan when it would be a fixed address were we to know
6566	 the proper equivalences.  So invalidate all memory if there is
6567	 a BLKmode or nonscalar memory reference or a reference to a
6568	 variable address.  */
6569      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6570	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6571    {
6572      invalidate_memory ();
6573      return;
6574    }
6575
6576  if (GET_CODE (set) == CLOBBER
6577      || CC0_P (dest)
6578      || dest == pc_rtx)
6579    return;
6580
6581  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6582    invalidate (XEXP (dest, 0), GET_MODE (dest));
6583  else if (code == REG || code == SUBREG || code == MEM)
6584    invalidate (dest, VOIDmode);
6585}
6586
6587/* Invalidate all insns from START up to the end of the function or the
6588   next label.  This is called when we wish to CSE around a block that is
6589   conditionally executed.  */
6590
6591static void
6592invalidate_skipped_block (rtx start)
6593{
6594  rtx insn;
6595
6596  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6597       insn = NEXT_INSN (insn))
6598    {
6599      if (! INSN_P (insn))
6600	continue;
6601
6602      if (GET_CODE (insn) == CALL_INSN)
6603	{
6604	  if (! CONST_OR_PURE_CALL_P (insn))
6605	    invalidate_memory ();
6606	  invalidate_for_call ();
6607	}
6608
6609      invalidate_from_clobbers (PATTERN (insn));
6610      note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6611    }
6612}
6613
6614/* If modifying X will modify the value in *DATA (which is really an
6615   `rtx *'), indicate that fact by setting the pointed-to value to
6616   NULL_RTX.  */
6617
6618static void
6619cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
6620{
6621  rtx *cse_check_loop_start_value = (rtx *) data;
6622
6623  if (*cse_check_loop_start_value == NULL_RTX
6624      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6625    return;
6626
6627  if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6628      || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6629    *cse_check_loop_start_value = NULL_RTX;
6630}
6631
6632/* X is a SET or CLOBBER contained in INSN that was found near the start of
6633   a loop that starts with the label at LOOP_START.
6634
6635   If X is a SET, we see if its SET_SRC is currently in our hash table.
6636   If so, we see if it has a value equal to some register used only in the
6637   loop exit code (as marked by jump.c).
6638
6639   If those two conditions are true, we search backwards from the start of
6640   the loop to see if that same value was loaded into a register that still
6641   retains its value at the start of the loop.
6642
6643   If so, we insert an insn after the load to copy the destination of that
6644   load into the equivalent register and (try to) replace our SET_SRC with that
6645   register.
6646
6647   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
6648
6649static void
6650cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
6651{
6652  struct table_elt *src_elt;
6653
6654  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6655     are setting PC or CC0 or whose SET_SRC is already a register.  */
6656  if (GET_CODE (x) == SET
6657      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6658      && GET_CODE (SET_SRC (x)) != REG)
6659    {
6660      src_elt = lookup (SET_SRC (x),
6661			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6662			GET_MODE (SET_DEST (x)));
6663
6664      if (src_elt)
6665	for (src_elt = src_elt->first_same_value; src_elt;
6666	     src_elt = src_elt->next_same_value)
6667	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6668	      && COST (src_elt->exp) < COST (SET_SRC (x)))
6669	    {
6670	      rtx p, set;
6671
6672	      /* Look for an insn in front of LOOP_START that sets
6673		 something in the desired mode to SET_SRC (x) before we hit
6674		 a label or CALL_INSN.  */
6675
6676	      for (p = prev_nonnote_insn (loop_start);
6677		   p && GET_CODE (p) != CALL_INSN
6678		   && GET_CODE (p) != CODE_LABEL;
6679		   p = prev_nonnote_insn (p))
6680		if ((set = single_set (p)) != 0
6681		    && GET_CODE (SET_DEST (set)) == REG
6682		    && GET_MODE (SET_DEST (set)) == src_elt->mode
6683		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6684		  {
6685		    /* We now have to ensure that nothing between P
6686		       and LOOP_START modified anything referenced in
6687		       SET_SRC (x).  We know that nothing within the loop
6688		       can modify it, or we would have invalidated it in
6689		       the hash table.  */
6690		    rtx q;
6691		    rtx cse_check_loop_start_value = SET_SRC (x);
6692		    for (q = p; q != loop_start; q = NEXT_INSN (q))
6693		      if (INSN_P (q))
6694			note_stores (PATTERN (q),
6695				     cse_check_loop_start,
6696				     &cse_check_loop_start_value);
6697
6698		    /* If nothing was changed and we can replace our
6699		       SET_SRC, add an insn after P to copy its destination
6700		       to what we will be replacing SET_SRC with.  */
6701		    if (cse_check_loop_start_value
6702			&& single_set (p)
6703			&& !can_throw_internal (insn)
6704			&& validate_change (insn, &SET_SRC (x),
6705					    src_elt->exp, 0))
6706		      {
6707			/* If this creates new pseudos, this is unsafe,
6708			   because the regno of new pseudo is unsuitable
6709			   to index into reg_qty when cse_insn processes
6710			   the new insn.  Therefore, if a new pseudo was
6711			   created, discard this optimization.  */
6712			int nregs = max_reg_num ();
6713			rtx move
6714			  = gen_move_insn (src_elt->exp, SET_DEST (set));
6715			if (nregs != max_reg_num ())
6716			  {
6717			    if (! validate_change (insn, &SET_SRC (x),
6718						   SET_SRC (set), 0))
6719			      abort ();
6720			  }
6721			else
6722			  {
6723			    if (CONSTANT_P (SET_SRC (set))
6724				&& ! find_reg_equal_equiv_note (insn))
6725			      set_unique_reg_note (insn, REG_EQUAL,
6726						   SET_SRC (set));
6727			    if (control_flow_insn_p (p))
6728			      /* p can cause a control flow transfer so it
6729				 is the last insn of a basic block.  We can't
6730				 therefore use emit_insn_after.  */
6731			      emit_insn_before (move, next_nonnote_insn (p));
6732			    else
6733			      emit_insn_after (move, p);
6734			  }
6735		      }
6736		    break;
6737		  }
6738	    }
6739    }
6740
6741  /* Deal with the destination of X affecting the stack pointer.  */
6742  addr_affects_sp_p (SET_DEST (x));
6743
6744  /* See comment on similar code in cse_insn for explanation of these
6745     tests.  */
6746  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6747      || GET_CODE (SET_DEST (x)) == MEM)
6748    invalidate (SET_DEST (x), VOIDmode);
6749  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6750	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6751    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6752}
6753
6754/* Find the end of INSN's basic block and return its range,
6755   the total number of SETs in all the insns of the block, the last insn of the
6756   block, and the branch path.
6757
6758   The branch path indicates which branches should be followed.  If a nonzero
6759   path size is specified, the block should be rescanned and a different set
6760   of branches will be taken.  The branch path is only used if
6761   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6762
6763   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6764   used to describe the block.  It is filled in with the information about
6765   the current block.  The incoming structure's branch path, if any, is used
6766   to construct the output branch path.  */
6767
6768void
6769cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6770			int follow_jumps, int after_loop, int skip_blocks)
6771{
6772  rtx p = insn, q;
6773  int nsets = 0;
6774  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6775  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6776  int path_size = data->path_size;
6777  int path_entry = 0;
6778  int i;
6779
6780  /* Update the previous branch path, if any.  If the last branch was
6781     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
6782     shorten the path by one and look at the previous branch.  We know that
6783     at least one branch must have been taken if PATH_SIZE is nonzero.  */
6784  while (path_size > 0)
6785    {
6786      if (data->path[path_size - 1].status != NOT_TAKEN)
6787	{
6788	  data->path[path_size - 1].status = NOT_TAKEN;
6789	  break;
6790	}
6791      else
6792	path_size--;
6793    }
6794
6795  /* If the first instruction is marked with QImode, that means we've
6796     already processed this block.  Our caller will look at DATA->LAST
6797     to figure out where to go next.  We want to return the next block
6798     in the instruction stream, not some branched-to block somewhere
6799     else.  We accomplish this by pretending our caller forbade us to
6800     follow jumps or skip blocks.  */
6801  if (GET_MODE (insn) == QImode)
6802    follow_jumps = skip_blocks = 0;
6803
6804  /* Scan to end of this basic block.  */
6805  while (p && GET_CODE (p) != CODE_LABEL)
6806    {
6807      /* Don't cse out the end of a loop.  This makes a difference
6808	 only for the unusual loops that always execute at least once;
6809	 all other loops have labels there so we will stop in any case.
6810	 Cse'ing out the end of the loop is dangerous because it
6811	 might cause an invariant expression inside the loop
6812	 to be reused after the end of the loop.  This would make it
6813	 hard to move the expression out of the loop in loop.c,
6814	 especially if it is one of several equivalent expressions
6815	 and loop.c would like to eliminate it.
6816
6817	 If we are running after loop.c has finished, we can ignore
6818	 the NOTE_INSN_LOOP_END.  */
6819
6820      if (! after_loop && GET_CODE (p) == NOTE
6821	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6822	break;
6823
6824      /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
6825	 the regs restored by the longjmp come from
6826	 a later time than the setjmp.  */
6827      if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6828	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6829	break;
6830
6831      /* A PARALLEL can have lots of SETs in it,
6832	 especially if it is really an ASM_OPERANDS.  */
6833      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6834	nsets += XVECLEN (PATTERN (p), 0);
6835      else if (GET_CODE (p) != NOTE)
6836	nsets += 1;
6837
6838      /* Ignore insns made by CSE; they cannot affect the boundaries of
6839	 the basic block.  */
6840
6841      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6842	high_cuid = INSN_CUID (p);
6843      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6844	low_cuid = INSN_CUID (p);
6845
6846      /* See if this insn is in our branch path.  If it is and we are to
6847	 take it, do so.  */
6848      if (path_entry < path_size && data->path[path_entry].branch == p)
6849	{
6850	  if (data->path[path_entry].status != NOT_TAKEN)
6851	    p = JUMP_LABEL (p);
6852
6853	  /* Point to next entry in path, if any.  */
6854	  path_entry++;
6855	}
6856
6857      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6858	 was specified, we haven't reached our maximum path length, there are
6859	 insns following the target of the jump, this is the only use of the
6860	 jump label, and the target label is preceded by a BARRIER.
6861
6862	 Alternatively, we can follow the jump if it branches around a
6863	 block of code and there are no other branches into the block.
6864	 In this case invalidate_skipped_block will be called to invalidate any
6865	 registers set in the block when following the jump.  */
6866
6867      else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6868	       && GET_CODE (p) == JUMP_INSN
6869	       && GET_CODE (PATTERN (p)) == SET
6870	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6871	       && JUMP_LABEL (p) != 0
6872	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6873	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6874	{
6875	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6876	    if ((GET_CODE (q) != NOTE
6877		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6878		 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6879		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6880		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6881	      break;
6882
6883	  /* If we ran into a BARRIER, this code is an extension of the
6884	     basic block when the branch is taken.  */
6885	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6886	    {
6887	      /* Don't allow ourselves to keep walking around an
6888		 always-executed loop.  */
6889	      if (next_real_insn (q) == next)
6890		{
6891		  p = NEXT_INSN (p);
6892		  continue;
6893		}
6894
6895	      /* Similarly, don't put a branch in our path more than once.  */
6896	      for (i = 0; i < path_entry; i++)
6897		if (data->path[i].branch == p)
6898		  break;
6899
6900	      if (i != path_entry)
6901		break;
6902
6903	      data->path[path_entry].branch = p;
6904	      data->path[path_entry++].status = TAKEN;
6905
6906	      /* This branch now ends our path.  It was possible that we
6907		 didn't see this branch the last time around (when the
6908		 insn in front of the target was a JUMP_INSN that was
6909		 turned into a no-op).  */
6910	      path_size = path_entry;
6911
6912	      p = JUMP_LABEL (p);
6913	      /* Mark block so we won't scan it again later.  */
6914	      PUT_MODE (NEXT_INSN (p), QImode);
6915	    }
6916	  /* Detect a branch around a block of code.  */
6917	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6918	    {
6919	      rtx tmp;
6920
6921	      if (next_real_insn (q) == next)
6922		{
6923		  p = NEXT_INSN (p);
6924		  continue;
6925		}
6926
6927	      for (i = 0; i < path_entry; i++)
6928		if (data->path[i].branch == p)
6929		  break;
6930
6931	      if (i != path_entry)
6932		break;
6933
6934	      /* This is no_labels_between_p (p, q) with an added check for
6935		 reaching the end of a function (in case Q precedes P).  */
6936	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6937		if (GET_CODE (tmp) == CODE_LABEL)
6938		  break;
6939
6940	      if (tmp == q)
6941		{
6942		  data->path[path_entry].branch = p;
6943		  data->path[path_entry++].status = AROUND;
6944
6945		  path_size = path_entry;
6946
6947		  p = JUMP_LABEL (p);
6948		  /* Mark block so we won't scan it again later.  */
6949		  PUT_MODE (NEXT_INSN (p), QImode);
6950		}
6951	    }
6952	}
6953      p = NEXT_INSN (p);
6954    }
6955
6956  data->low_cuid = low_cuid;
6957  data->high_cuid = high_cuid;
6958  data->nsets = nsets;
6959  data->last = p;
6960
6961  /* If none of the jumps in the path were taken, set our path length to zero
6962     so a rescan won't be done.  */
6963  for (i = path_size - 1; i >= 0; i--)
6964    if (data->path[i].status != NOT_TAKEN)
6965      break;
6966
6967  if (i == -1)
6968    data->path_size = 0;
6969  else
6970    data->path_size = path_size;
6971
6972  /* End the current branch path.  */
6973  data->path[path_size].branch = 0;
6974}
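
/* An example of the rescan discipline above (illustrative, not from the
   original source): starting from a recorded path {TAKEN, TAKEN},
   successive calls rewrite it to {TAKEN, NOT_TAKEN}, then {NOT_TAKEN},
   then the empty path, so every combination of followed branches is
   eventually explored exactly once.  */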
6975
6976/* Perform cse on the instructions of a function.
6977   F is the first instruction.
6978   NREGS is one plus the highest pseudo-reg number used in the function.
6979
6980   AFTER_LOOP is 1 if this is the cse call done after loop optimization
6981   (only if -frerun-cse-after-loop).
6982
6983   Returns 1 if jump_optimize should be redone due to simplifications
6984   in conditional jump instructions.  */
6985
6986int
6987cse_main (rtx f, int nregs, int after_loop, FILE *file)
6988{
6989  struct cse_basic_block_data val;
6990  rtx insn = f;
6991  int i;
6992
6993  val.path = xmalloc (sizeof (struct branch_path)
6994		      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6995
6996  cse_jumps_altered = 0;
6997  recorded_label_ref = 0;
6998  constant_pool_entries_cost = 0;
6999  constant_pool_entries_regcost = 0;
7000  val.path_size = 0;
7001
7002  init_recog ();
7003  init_alias_analysis ();
7004
7005  max_reg = nregs;
7006
7007  max_insn_uid = get_max_uid ();
7008
7009  reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
7010
7011#ifdef LOAD_EXTEND_OP
7012
7013  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
7014     and change the code and mode as appropriate.  */
7015  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7016#endif
7017
7018  /* Reset the counter indicating how many elements have been made
7019     thus far.  */
7020  n_elements_made = 0;
7021
7022  /* Find the largest uid.  */
7023
7024  max_uid = get_max_uid ();
7025  uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7026
7027  /* Compute the mapping from uids to cuids.
7028     CUIDs are numbers assigned to insns, like uids,
7029     except that cuids increase monotonically through the code.
7030     Don't assign cuids to line-number NOTEs, so that the distance in cuids
7031     between two insns is not affected by -g.  */
7032
7033  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7034    {
7035      if (GET_CODE (insn) != NOTE
7036	  || NOTE_LINE_NUMBER (insn) < 0)
7037	INSN_CUID (insn) = ++i;
7038      else
7039	/* Give a line number note the same cuid as preceding insn.  */
7040	INSN_CUID (insn) = i;
7041    }
7042
7043  ggc_push_context ();
7044
7045  /* Loop over basic blocks.
7046     Compute the maximum number of qty's needed for each basic block
7047     (which is 2 for each SET).  */
7048  insn = f;
7049  while (insn)
7050    {
7051      cse_altered = 0;
7052      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7053			      flag_cse_skip_blocks);
7054
7055      /* If this basic block was already processed or has no sets, skip it.  */
7056      if (val.nsets == 0 || GET_MODE (insn) == QImode)
7057	{
7058	  PUT_MODE (insn, VOIDmode);
7059	  insn = (val.last ? NEXT_INSN (val.last) : 0);
7060	  val.path_size = 0;
7061	  continue;
7062	}
7063
7064      cse_basic_block_start = val.low_cuid;
7065      cse_basic_block_end = val.high_cuid;
7066      max_qty = val.nsets * 2;
7067
7068      if (file)
7069	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7070		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7071		 val.nsets);
7072
7073      /* Make MAX_QTY bigger to give us room to optimize
7074	 past the end of this basic block, if that should prove useful.  */
7075      if (max_qty < 500)
7076	max_qty = 500;
7077
7078      /* If this basic block is being extended by following certain jumps,
7079         (see `cse_end_of_basic_block'), we reprocess the code from the start.
7080         Otherwise, we start after this basic block.  */
7081      if (val.path_size > 0)
7082	cse_basic_block (insn, val.last, val.path, 0);
7083      else
7084	{
7085	  int old_cse_jumps_altered = cse_jumps_altered;
7086	  rtx temp;
7087
7088	  /* When cse changes a conditional jump to an unconditional
7089	     jump, we want to reprocess the block, since it will give
7090	     us a new branch path to investigate.  */
7091	  cse_jumps_altered = 0;
7092	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7093	  if (cse_jumps_altered == 0
7094	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7095	    insn = temp;
7096
7097	  cse_jumps_altered |= old_cse_jumps_altered;
7098	}
7099
7100      if (cse_altered)
7101	ggc_collect ();
7102
7103#ifdef USE_C_ALLOCA
7104      alloca (0);
7105#endif
7106    }
7107
7108  ggc_pop_context ();
7109
7110  if (max_elements_made < n_elements_made)
7111    max_elements_made = n_elements_made;
7112
7113  /* Clean up.  */
7114  end_alias_analysis ();
7115  free (uid_cuid);
7116  free (reg_eqv_table);
7117  free (val.path);
7118
7119  return cse_jumps_altered || recorded_label_ref;
7120}
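
/* An illustrative driver (an assumption about the caller, not code from
   this file): the toplevel pass driver invokes cse_main roughly like
   this, rerunning the jump optimizer when the return value reports
   simplified conditional jumps.  */

static void
run_cse_sketch (FILE *dump)
{
  if (cse_main (get_insns (), max_reg_num (), 0, dump))
    {
      /* Placeholder: rerun jump optimization here; CSE may have turned
	 conditional jumps into unconditional jumps or no-ops.  */
    }
}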
7121
7122/* Process a single basic block.  FROM and TO are the limits of the basic
7123   block.  NEXT_BRANCH points to the branch path when following jumps or
7124   a null path when not following jumps.
7125
7126   AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7127   loop.  This is true when we are being called for the last time on a
7128   block and this CSE pass is before loop.c.  */
7129
7130static rtx
7131cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
7132		 int around_loop)
7133{
7134  rtx insn;
7135  int to_usage = 0;
7136  rtx libcall_insn = NULL_RTX;
7137  int num_insns = 0;
7138  int no_conflict = 0;
7139
7140  /* Allocate the space needed by qty_table.  */
7141  qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
7142
7143  new_basic_block ();
7144
7145  /* TO might be a label.  If so, protect it from being deleted.  */
7146  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7147    ++LABEL_NUSES (to);
7148
7149  for (insn = from; insn != to; insn = NEXT_INSN (insn))
7150    {
7151      enum rtx_code code = GET_CODE (insn);
7152
7153      /* If we have processed 1,000 insns, flush the hash table to
7154	 avoid extreme quadratic behavior.  We must not include NOTEs
7155	 in the count since there may be more of them when generating
7156	 debugging information.  If we clear the table at different
7157	 times, code generated with -g -O might be different from code
7158	 generated with -O but not -g.
7159
7160	 ??? This is a real kludge and needs to be done some other way.
7161	 Perhaps for 2.9.  */
7162      if (code != NOTE && num_insns++ > 1000)
7163	{
7164	  flush_hash_table ();
7165	  num_insns = 0;
7166	}
7167
7168      /* See if this is a branch that is part of the path.  If so, and it is
7169	 to be taken, do so.  */
7170      if (next_branch->branch == insn)
7171	{
7172	  enum taken status = next_branch++->status;
7173	  if (status != NOT_TAKEN)
7174	    {
7175	      if (status == TAKEN)
7176		record_jump_equiv (insn, 1);
7177	      else
7178		invalidate_skipped_block (NEXT_INSN (insn));
7179
7180	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7181		 Then follow this branch.  */
7182#ifdef HAVE_cc0
7183	      prev_insn_cc0 = 0;
7184	      prev_insn = insn;
7185#endif
7186	      insn = JUMP_LABEL (insn);
7187	      continue;
7188	    }
7189	}
7190
7191      if (GET_MODE (insn) == QImode)
7192	PUT_MODE (insn, VOIDmode);
7193
7194      if (GET_RTX_CLASS (code) == 'i')
7195	{
7196	  rtx p;
7197
7198	  /* Process notes first so we have all notes in canonical forms when
7199	     looking for duplicate operations.  */
7200
7201	  if (REG_NOTES (insn))
7202	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7203
7204	  /* Track when we are inside a LIBCALL block.  Inside such a block,
7205	     we do not want to record destinations.  The last insn of a
7206	     LIBCALL block is not considered to be part of the block, since
7207	     its destination is the result of the block and hence should be
7208	     recorded.  */
7209
7210	  if (REG_NOTES (insn) != 0)
7211	    {
7212	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7213		libcall_insn = XEXP (p, 0);
7214	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7215		{
7216		  /* Keep libcall_insn for the last SET insn of a no-conflict
7217		     block to prevent changing the destination.  */
7218		  if (! no_conflict)
7219		    libcall_insn = 0;
7220		  else
7221		    no_conflict = -1;
7222		}
7223	      else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7224		no_conflict = 1;
7225	    }
7226
7227	  cse_insn (insn, libcall_insn);
7228
7229	  if (no_conflict == -1)
7230	    {
7231	      libcall_insn = 0;
7232	      no_conflict = 0;
7233	    }
7234
7235	  /* If we haven't already found an insn where we added a LABEL_REF,
7236	     check this one.  */
7237	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
7238	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7239			       (void *) insn))
7240	    recorded_label_ref = 1;
7241	}
7242
7243      /* If INSN is now an unconditional jump, skip to the end of our
7244	 basic block by pretending that we just did the last insn in the
7245	 basic block.  If we are jumping to the end of our block, show
7246	 that we can have one usage of TO.  */
7247
7248      if (any_uncondjump_p (insn))
7249	{
7250	  if (to == 0)
7251	    {
7252	      free (qty_table);
7253	      return 0;
7254	    }
7255
7256	  if (JUMP_LABEL (insn) == to)
7257	    to_usage = 1;
7258
7259	  /* Maybe TO was deleted because the jump is unconditional.
7260	     If so, there is nothing left in this basic block.  */
7261	  /* ??? Perhaps it would be smarter to set TO
7262	     to whatever follows this insn,
7263	     and pretend the basic block had always ended here.  */
7264	  if (INSN_DELETED_P (to))
7265	    break;
7266
7267	  insn = PREV_INSN (to);
7268	}
7269
7270      /* See if it is ok to keep on going past the label
7271	 which used to end our basic block.  Remember that we incremented
7272	 the count of that label, so we decrement it here.  If we made
7273	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7274	 want to count the use in that jump.  */
7275
7276      if (to != 0 && NEXT_INSN (insn) == to
7277	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7278	{
7279	  struct cse_basic_block_data val;
7280	  rtx prev;
7281
7282	  insn = NEXT_INSN (to);
7283
7284	  /* If TO was the last insn in the function, we are done.  */
7285	  if (insn == 0)
7286	    {
7287	      free (qty_table);
7288	      return 0;
7289	    }
7290
7291	  /* If TO was preceded by a BARRIER we are done with this block
7292	     because it has no continuation.  */
7293	  prev = prev_nonnote_insn (to);
7294	  if (prev && GET_CODE (prev) == BARRIER)
7295	    {
7296	      free (qty_table);
7297	      return insn;
7298	    }
7299
7300	  /* Find the end of the following block.  Note that we won't be
7301	     following branches in this case.  */
7302	  to_usage = 0;
7303	  val.path_size = 0;
7304	  val.path = xmalloc (sizeof (struct branch_path)
7305			      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7306	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
7307	  free (val.path);
7308
7309	  /* If the tables we allocated have enough space left
7310	     to handle all the SETs in the next basic block,
7311	     continue through it.  Otherwise, return,
7312	     and that block will be scanned individually.  */
7313	  if (val.nsets * 2 + next_qty > max_qty)
7314	    break;
7315
7316	  cse_basic_block_start = val.low_cuid;
7317	  cse_basic_block_end = val.high_cuid;
7318	  to = val.last;
7319
7320	  /* Prevent TO from being deleted if it is a label.  */
7321	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7322	    ++LABEL_NUSES (to);
7323
7324	  /* Back up so we process the first insn in the extension.  */
7325	  insn = PREV_INSN (insn);
7326	}
7327    }
7328
7329  if (next_qty > max_qty)
7330    abort ();
7331
7332  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7333     the previous insn is the only insn that branches to the head of a loop,
7334     we can cse into the loop.  Don't do this if we changed the jump
7335     structure of a loop unless we aren't going to be following jumps.  */
7336
7337  insn = prev_nonnote_insn (to);
7338  if ((cse_jumps_altered == 0
7339       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7340      && around_loop && to != 0
7341      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7342      && GET_CODE (insn) == JUMP_INSN
7343      && JUMP_LABEL (insn) != 0
7344      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7345    cse_around_loop (JUMP_LABEL (insn));
7346
7347  free (qty_table);
7348
7349  return to ? NEXT_INSN (to) : 0;
7350}
7351
7352/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7353   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7354
7355static int
7356check_for_label_ref (rtx *rtl, void *data)
7357{
7358  rtx insn = (rtx) data;
7359
7360  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7361     we must rerun jump since it needs to place the note.  If this is a
7362     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7363     since no REG_LABEL will be added.  */
7364  return (GET_CODE (*rtl) == LABEL_REF
7365	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7366	  && LABEL_P (XEXP (*rtl, 0))
7367	  && INSN_UID (XEXP (*rtl, 0)) != 0
7368	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7369}
7370
7371/* Count the number of times registers are used (not set) in X.
7372   COUNTS is an array in which we accumulate the count, INCR is how much
7373   we count each register usage.  */
7374
7375static void
7376count_reg_usage (rtx x, int *counts, int incr)
7377{
7378  enum rtx_code code;
7379  rtx note;
7380  const char *fmt;
7381  int i, j;
7382
7383  if (x == 0)
7384    return;
7385
7386  switch (code = GET_CODE (x))
7387    {
7388    case REG:
7389      counts[REGNO (x)] += incr;
7390      return;
7391
7392    case PC:
7393    case CC0:
7394    case CONST:
7395    case CONST_INT:
7396    case CONST_DOUBLE:
7397    case CONST_VECTOR:
7398    case SYMBOL_REF:
7399    case LABEL_REF:
7400      return;
7401
7402    case CLOBBER:
7403      /* If we are clobbering a MEM, mark any registers inside the address
7404         as being used.  */
7405      if (GET_CODE (XEXP (x, 0)) == MEM)
7406	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7407      return;
7408
7409    case SET:
7410      /* Unless we are setting a REG, count everything in SET_DEST.  */
7411      if (GET_CODE (SET_DEST (x)) != REG)
7412	count_reg_usage (SET_DEST (x), counts, incr);
7413      count_reg_usage (SET_SRC (x), counts, incr);
7414      return;
7415
7416    case CALL_INSN:
7417      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7418      /* Fall through.  */
7419
7420    case INSN:
7421    case JUMP_INSN:
7422      count_reg_usage (PATTERN (x), counts, incr);
7423
7424      /* Things used in a REG_EQUAL note aren't dead since loop may try to
7425	 use them.  */
7426
7427      note = find_reg_equal_equiv_note (x);
7428      if (note)
7429	{
7430	  rtx eqv = XEXP (note, 0);
7431
7432	  if (GET_CODE (eqv) == EXPR_LIST)
7433	  /* This REG_EQUAL note describes the result of a function call.
7434	     Process all the arguments.  */
7435	    do
7436	      {
7437		count_reg_usage (XEXP (eqv, 0), counts, incr);
7438		eqv = XEXP (eqv, 1);
7439	      }
7440	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
7441	  else
7442	    count_reg_usage (eqv, counts, incr);
7443	}
7444      return;
7445
7446    case EXPR_LIST:
7447      if (REG_NOTE_KIND (x) == REG_EQUAL
7448	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
7449	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7450	     involving registers in the address.  */
7451	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
7452	count_reg_usage (XEXP (x, 0), counts, incr);
7453
7454      count_reg_usage (XEXP (x, 1), counts, incr);
7455      return;
7456
7457    case ASM_OPERANDS:
7458      /* Iterate over just the inputs, not the constraints as well.  */
7459      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7460	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7461      return;
7462
7463    case INSN_LIST:
7464      abort ();
7465
7466    default:
7467      break;
7468    }
7469
7470  fmt = GET_RTX_FORMAT (code);
7471  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7472    {
7473      if (fmt[i] == 'e')
7474	count_reg_usage (XEXP (x, i), counts, incr);
7475      else if (fmt[i] == 'E')
7476	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7477	  count_reg_usage (XVECEXP (x, i, j), counts, incr);
7478    }
7479}
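
/* A minimal usage sketch of count_reg_usage (illustrative only; this
   helper is not part of the original file): count every register use in
   the insn stream and test whether pseudo REGNO is referenced at all.
   It mirrors the set-up loop in delete_trivially_dead_insns below.  */

static int
reg_is_used_p (rtx insns, int nreg, unsigned int regno)
{
  int *counts = xcalloc (nreg, sizeof (int));
  rtx insn;
  int used;

  /* Accumulate a use count for every register, exactly as
     delete_trivially_dead_insns does before its deletion loop.  */
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, 1);

  used = counts[regno] != 0;
  free (counts);
  return used;
}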
7480
7481/* Return true if set is live.  */
7482static bool
7483set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
7484	    int *counts)
7485{
7486#ifdef HAVE_cc0
7487  rtx tem;
7488#endif
7489
7490  if (set_noop_p (set))
7491    ;
7492
7493#ifdef HAVE_cc0
7494  else if (GET_CODE (SET_DEST (set)) == CC0
7495	   && !side_effects_p (SET_SRC (set))
7496	   && ((tem = next_nonnote_insn (insn)) == 0
7497	       || !INSN_P (tem)
7498	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7499    return false;
7500#endif
7501  else if (GET_CODE (SET_DEST (set)) != REG
7502	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7503	   || counts[REGNO (SET_DEST (set))] != 0
7504	   || side_effects_p (SET_SRC (set))
7505	   /* An ADDRESSOF expression can turn into a use of the
7506	      internal arg pointer, so always consider the
7507	      internal arg pointer live.  If it is truly dead,
7508	      flow will delete the initializing insn.  */
7509	   || (SET_DEST (set) == current_function_internal_arg_pointer))
7510    return true;
7511  return false;
7512}
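
/* Examples (illustrative, not from the original source): assuming
   counts[90] == 0 and reg 90 is a pseudo,

       (set (reg:SI 90) (reg:SI 91))		dead: result never used
       (set (reg:SI 90) (reg:SI 90))		dead: no-op move
       (set (mem:SI (reg:SI 91)) (reg:SI 90))	live: destination not a REG

   and any set whose source has side effects (volatile memory, for
   instance) is always treated as live.  */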
7513
7514/* Return true if insn is live.  */
7515
7516static bool
7517insn_live_p (rtx insn, int *counts)
7518{
7519  int i;
7520  if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7521    return true;
7522  else if (GET_CODE (PATTERN (insn)) == SET)
7523    return set_live_p (PATTERN (insn), insn, counts);
7524  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7525    {
7526      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7527	{
7528	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7529
7530	  if (GET_CODE (elt) == SET)
7531	    {
7532	      if (set_live_p (elt, insn, counts))
7533		return true;
7534	    }
7535	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7536	    return true;
7537	}
7538      return false;
7539    }
7540  else
7541    return true;
7542}
7543
7544/* Return true if libcall is dead as a whole.  */
7545
7546static bool
7547dead_libcall_p (rtx insn, int *counts)
7548{
7549  rtx note, set, new;
7550
7551  /* See if there's a REG_EQUAL note on this insn and try to
7552     replace the source with the REG_EQUAL expression.
7553
7554     We assume that insns with REG_RETVALs can only be reg->reg
7555     copies at this point.  */
7556  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7557  if (!note)
7558    return false;
7559
7560  set = single_set (insn);
7561  if (!set)
7562    return false;
7563
7564  new = simplify_rtx (XEXP (note, 0));
7565  if (!new)
7566    new = XEXP (note, 0);
7567
7568  /* While changing insn, we must update the counts accordingly.  */
7569  count_reg_usage (insn, counts, -1);
7570
7571  if (validate_change (insn, &SET_SRC (set), new, 0))
7572    {
7573      count_reg_usage (insn, counts, 1);
7574      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7575      remove_note (insn, note);
7576      return true;
7577    }
7578
7579  if (CONSTANT_P (new))
7580    {
7581      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7582      if (new && validate_change (insn, &SET_SRC (set), new, 0))
7583	{
7584	  count_reg_usage (insn, counts, 1);
7585	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7586	  remove_note (insn, note);
7587	  return true;
7588	}
7589    }
7590
7591  count_reg_usage (insn, counts, 1);
7592  return false;
7593}
7594
7595/* Scan all the insns and delete any that are dead; i.e., they store a register
7596   that is never used or they copy a register to itself.
7597
7598   This is used to remove insns made obviously dead by cse, loop or other
7599   optimizations.  It improves the heuristics in loop since it won't try to
7600   move dead invariants out of loops or make givs for dead quantities.  The
7601   remaining passes of the compilation are also sped up.  */
7602
7603int
7604delete_trivially_dead_insns (rtx insns, int nreg)
7605{
7606  int *counts;
7607  rtx insn, prev;
7608  int in_libcall = 0, dead_libcall = 0;
7609  int ndead = 0, nlastdead, niterations = 0;
7610
7611  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7612  /* First count the number of times each register is used.  */
7613  counts = xcalloc (nreg, sizeof (int));
7614  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7615    count_reg_usage (insn, counts, 1);
7616
7617  do
7618    {
7619      nlastdead = ndead;
7620      niterations++;
7621      /* Go from the last insn to the first and delete insns that only set unused
7622	 registers or copy a register to itself.  As we delete an insn, remove
7623	 usage counts for registers it uses.
7624
7625	 The first jump optimization pass may leave a real insn as the last
7626	     insn in the function.  We must not skip that insn or we may end
7627	 up deleting code that is not really dead.  */
7628      insn = get_last_insn ();
7629      if (! INSN_P (insn))
7630	insn = prev_real_insn (insn);
7631
7632      for (; insn; insn = prev)
7633	{
7634	  int live_insn = 0;
7635
7636	  prev = prev_real_insn (insn);
7637
7638	  /* Don't delete any insns that are part of a libcall block unless
7639	     we can delete the whole libcall block.
7640
7641	     Flow or loop might get confused if we did that.  Remember
7642	     that we are scanning backwards.  */
7643	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7644	    {
7645	      in_libcall = 1;
7646	      live_insn = 1;
7647	      dead_libcall = dead_libcall_p (insn, counts);
7648	    }
7649	  else if (in_libcall)
7650	    live_insn = ! dead_libcall;
7651	  else
7652	    live_insn = insn_live_p (insn, counts);
7653
7654	  /* If this is a dead insn, delete it and show registers in it aren't
7655	     being used.  */
7656
7657	  if (! live_insn)
7658	    {
7659	      count_reg_usage (insn, counts, -1);
7660	      delete_insn_and_edges (insn);
7661	      ndead++;
7662	    }
7663
7664	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7665	    {
7666	      in_libcall = 0;
7667	      dead_libcall = 0;
7668	    }
7669	}
7670    }
7671  while (ndead != nlastdead);
7672
7673  if (rtl_dump_file && ndead)
7674    fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7675	     ndead, niterations);
7676  /* Clean up.  */
7677  free (counts);
7678  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7679  return ndead;
7680}
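
/* A minimal invocation sketch (an illustrative assumption about a
   caller, not code from this file): a pass wanting the dead-insn
   cleanup runs it over the whole chain like this.  get_insns and
   max_reg_num are the usual accessors for the insn chain and the
   pseudo-register count.  */

static void
cleanup_dead_insns_sketch (void)
{
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
}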
7681
7682/* This function is called via for_each_rtx.  The argument, NEWREG, is
7683   a condition code register with the desired mode.  If we are looking
7684   at the same register in a different mode, replace it with
7685   NEWREG.  */
7686
7687static int
7688cse_change_cc_mode (rtx *loc, void *data)
7689{
7690  rtx newreg = (rtx) data;
7691
7692  if (*loc
7693      && GET_CODE (*loc) == REG
7694      && REGNO (*loc) == REGNO (newreg)
7695      && GET_MODE (*loc) != GET_MODE (newreg))
7696    {
7697      *loc = newreg;
7698      return -1;
7699    }
7700  return 0;
7701}
7702
7703/* Change the mode of any reference to the register REGNO (NEWREG) to
7704   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7705   any instruction which modifies NEWREG.  */
7706
7707static void
7708cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7709{
7710  rtx insn;
7711
7712  for (insn = start; insn != end; insn = NEXT_INSN (insn))
7713    {
7714      if (! INSN_P (insn))
7715	continue;
7716
7717      if (reg_set_p (newreg, insn))
7718	return;
7719
7720      for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7721      for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
7722    }
7723}
7724
7725/* BB is a basic block which finishes with CC_REG as a condition code
7726   register which is set to CC_SRC.  Look through the successors of BB
7727   to find blocks which have a single predecessor (i.e., this one),
7728   and look through those blocks for an assignment to CC_REG which is
7729   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7730   permitted to change the mode of CC_SRC to a compatible mode.  This
7731   returns VOIDmode if no equivalent assignments were found.
7732   Otherwise it returns the mode which CC_SRC should wind up with.
7733
7734   The main complexity in this function is handling the mode issues.
7735   We may have more than one duplicate which we can eliminate, and we
7736   try to find a mode which will work for multiple duplicates.  */
7737
7738static enum machine_mode
7739cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7740{
7741  bool found_equiv;
7742  enum machine_mode mode;
7743  unsigned int insn_count;
7744  edge e;
7745  rtx insns[2];
7746  enum machine_mode modes[2];
7747  rtx last_insns[2];
7748  unsigned int i;
7749  rtx newreg;
7750
7751  /* We expect to have two successors.  Look at both before picking
7752     the final mode for the comparison.  If we have more successors
7753     (i.e., some sort of table jump, although that seems unlikely),
7754     then we require all beyond the first two to use the same
7755     mode.  */
7756
7757  found_equiv = false;
7758  mode = GET_MODE (cc_src);
7759  insn_count = 0;
7760  for (e = bb->succ; e; e = e->succ_next)
7761    {
7762      rtx insn;
7763      rtx end;
7764
7765      if (e->flags & EDGE_COMPLEX)
7766	continue;
7767
7768      if (! e->dest->pred
7769	  || e->dest->pred->pred_next
7770	  || e->dest == EXIT_BLOCK_PTR)
7771	continue;
7772
7773      end = NEXT_INSN (BB_END (e->dest));
7774      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7775	{
7776	  rtx set;
7777
7778	  if (! INSN_P (insn))
7779	    continue;
7780
7781	  /* If CC_SRC is modified, we have to stop looking for
7782	     something which uses it.  */
7783	  if (modified_in_p (cc_src, insn))
7784	    break;
7785
7786	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7787	  set = single_set (insn);
7788	  if (set
7789	      && GET_CODE (SET_DEST (set)) == REG
7790	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7791	    {
7792	      bool found;
7793	      enum machine_mode set_mode;
7794	      enum machine_mode comp_mode;
7795
7796	      found = false;
7797	      set_mode = GET_MODE (SET_SRC (set));
7798	      comp_mode = set_mode;
7799	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7800		found = true;
7801	      else if (GET_CODE (cc_src) == COMPARE
7802		       && GET_CODE (SET_SRC (set)) == COMPARE
7803		       && mode != set_mode
7804		       && rtx_equal_p (XEXP (cc_src, 0),
7805				       XEXP (SET_SRC (set), 0))
7806		       && rtx_equal_p (XEXP (cc_src, 1),
7807				       XEXP (SET_SRC (set), 1)))
7808
7809		{
7810		  comp_mode = (*targetm.cc_modes_compatible) (mode, set_mode);
7811		  if (comp_mode != VOIDmode
7812		      && (can_change_mode || comp_mode == mode))
7813		    found = true;
7814		}
7815
7816	      if (found)
7817		{
7818		  found_equiv = true;
7819		  if (insn_count < ARRAY_SIZE (insns))
7820		    {
7821		      insns[insn_count] = insn;
7822		      modes[insn_count] = set_mode;
7823		      last_insns[insn_count] = end;
7824		      ++insn_count;
7825
7826		      if (mode != comp_mode)
7827			{
7828			  if (! can_change_mode)
7829			    abort ();
7830			  mode = comp_mode;
7831			  PUT_MODE (cc_src, mode);
7832			}
7833		    }
7834		  else
7835		    {
7836		      if (set_mode != mode)
7837			{
7838			  /* We found a matching expression in the
7839			     wrong mode, but we don't have room to
7840			     store it in the array.  Punt.  This case
7841			     should be rare.  */
7842			  break;
7843			}
7844		      /* INSN sets CC_REG to a value equal to CC_SRC
7845			 with the right mode.  We can simply delete
7846			 it.  */
7847		      delete_insn (insn);
7848		    }
7849
7850		  /* We found an instruction to delete.  Keep looking,
7851		     in the hopes of finding a three-way jump.  */
7852		  continue;
7853		}
7854
7855	      /* We found an instruction which sets the condition
7856		 code, so don't look any farther.  */
7857	      break;
7858	    }
7859
7860	  /* If INSN sets CC_REG in some other way, don't look any
7861	     farther.  */
7862	  if (reg_set_p (cc_reg, insn))
7863	    break;
7864	}
7865
7866      /* If we fell off the bottom of the block, we can keep looking
7867	 through successors.  We pass CAN_CHANGE_MODE as false because
7868	 we aren't prepared to handle compatibility between the
7869	 further blocks and this block.  */
7870      if (insn == end)
7871	{
7872	  enum machine_mode submode;
7873
7874	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7875	  if (submode != VOIDmode)
7876	    {
7877	      if (submode != mode)
7878		abort ();
7879	      found_equiv = true;
7880	      can_change_mode = false;
7881	    }
7882	}
7883    }
7884
7885  if (! found_equiv)
7886    return VOIDmode;
7887
7888  /* Now INSN_COUNT is the number of instructions we found which set
7889     CC_REG to a value equivalent to CC_SRC.  The instructions are in
7890     INSNS.  The modes used by those instructions are in MODES.  */
7891
7892  newreg = NULL_RTX;
7893  for (i = 0; i < insn_count; ++i)
7894    {
7895      if (modes[i] != mode)
7896	{
7897	  /* We need to change the mode of CC_REG in INSNS[i] and
7898	     subsequent instructions.  */
7899	  if (! newreg)
7900	    {
7901	      if (GET_MODE (cc_reg) == mode)
7902		newreg = cc_reg;
7903	      else
7904		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7905	    }
7906	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7907				    newreg);
7908	}
7909
7910      delete_insn (insns[i]);
7911    }
7912
7913  return mode;
7914}
7915
7916/* If we have a fixed condition code register (or two), walk through
7917   the instructions and try to eliminate duplicate assignments.  */
7918
7919void
7920cse_condition_code_reg (void)
7921{
7922  unsigned int cc_regno_1;
7923  unsigned int cc_regno_2;
7924  rtx cc_reg_1;
7925  rtx cc_reg_2;
7926  basic_block bb;
7927
7928  if (! (*targetm.fixed_condition_code_regs) (&cc_regno_1, &cc_regno_2))
7929    return;
7930
7931  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7932  if (cc_regno_2 != INVALID_REGNUM)
7933    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7934  else
7935    cc_reg_2 = NULL_RTX;
7936
7937  FOR_EACH_BB (bb)
7938    {
7939      rtx last_insn;
7940      rtx cc_reg;
7941      rtx insn;
7942      rtx cc_src_insn;
7943      rtx cc_src;
7944      enum machine_mode mode;
7945      enum machine_mode orig_mode;
7946
7947      /* Look for blocks which end with a conditional jump based on a
7948	 condition code register.  Then look for the instruction which
7949	 sets the condition code register.  Then look through the
7950	 successor blocks for instructions which set the condition
7951	 code register to the same value.  There are other possible
7952	 uses of the condition code register, but these are by far the
7953	 most common and the ones which we are most likely to be able
7954	 to optimize.  */
7955
7956      last_insn = BB_END (bb);
7957      if (GET_CODE (last_insn) != JUMP_INSN)
7958	continue;
7959
7960      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7961	cc_reg = cc_reg_1;
7962      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7963	cc_reg = cc_reg_2;
7964      else
7965	continue;
7966
7967      cc_src_insn = NULL_RTX;
7968      cc_src = NULL_RTX;
7969      for (insn = PREV_INSN (last_insn);
7970	   insn && insn != PREV_INSN (BB_HEAD (bb));
7971	   insn = PREV_INSN (insn))
7972	{
7973	  rtx set;
7974
7975	  if (! INSN_P (insn))
7976	    continue;
7977	  set = single_set (insn);
7978	  if (set
7979	      && GET_CODE (SET_DEST (set)) == REG
7980	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7981	    {
7982	      cc_src_insn = insn;
7983	      cc_src = SET_SRC (set);
7984	      break;
7985	    }
7986	  else if (reg_set_p (cc_reg, insn))
7987	    break;
7988	}
7989
7990      if (! cc_src_insn)
7991	continue;
7992
7993      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7994	continue;
7995
7996      /* Now CC_REG is a condition code register used for a
7997	 conditional jump at the end of the block, and CC_SRC, in
7998	 CC_SRC_INSN, is the value to which that condition code
7999	 register is set, and CC_SRC is still meaningful at the end of
8000	 the basic block.  */
8001
8002      orig_mode = GET_MODE (cc_src);
8003      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
8004      if (mode != VOIDmode)
8005	{
8006	  if (mode != GET_MODE (cc_src))
8007	    abort ();
8008	  if (mode != orig_mode)
8009	    {
8010	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
8011
8012	      /* Change the mode of CC_REG in CC_SRC_INSN to
8013		 GET_MODE (NEWREG).  */
8014	      for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
8015			    newreg);
8016	      for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
8017			    newreg);
8018
8019	      /* Do the same in the following insns that use the
8020		 current value of CC_REG within BB.  */
8021	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
8022					NEXT_INSN (last_insn),
8023					newreg);
8024	    }
8025	}
8026    }
8027}
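
/* An illustrative transformation (not from the original source): when
   the block ending in the conditional jump and a single-predecessor
   successor both compute the same comparison,

     bb1:  (set (reg:CCZ 17) (compare:CCZ (reg:SI 60) (reg:SI 61)))
	   ... conditional jump on (reg:CCZ 17) ...
     bb2:  (set (reg:CCZ 17) (compare:CCZ (reg:SI 60) (reg:SI 61)))

   the duplicate set in bb2 is deleted, after widening the comparison
   mode through targetm.cc_modes_compatible if the two sets used
   different CC modes.  */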
8028