/* cse.c revision 132718 */
/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

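   As an illustration (a hypothetical RTL sequence, not taken from any
   particular target), suppose the scan sees

       (set (reg:SI 102) (plus:SI (reg:SI 100) (reg:SI 101)))
       (set (reg:SI 103) (plus:SI (reg:SI 100) (reg:SI 101)))

   The second PLUS hashes to the same table element as the first, so
   the second insn can be rewritten as the cheaper

       (set (reg:SI 103) (reg:SI 102))

   provided regs 100 and 101 have not been invalidated in between.
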
   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

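   For example (again a hypothetical sequence), after

       (set (reg:SI 104) (mem:SI (reg:SI 100)))
       (set (reg:SI 105) (reg:SI 104))

   the load gives reg 104 a fresh quantity number, and the copy makes
   reg_qty[105] equal to reg_qty[104], so either register can stand
   for the loaded value until one of them is stored into again.
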
   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with the qty_table `mode' are in the hash table for
   both registers and are in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

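   For instance (a made-up example), when scanning

       (set (reg:HI 106) (const_int 12))

   the (const_int 12), which itself has VOIDmode, is entered in the
   hash table with mode HImode, taken from the destination; the same
   constant moved into an SImode register gets a separate entry
   recorded with SImode.
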
Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.

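   A sketch of the usual timeline (hypothetical numbers): expressions
   mentioning register 107 are entered while reg_tick[107] == 3, so
   reg_in_table[107] is set to 3.  A later store bumps reg_tick[107]
   to 4; nothing is scanned yet, and lookups simply skip the stale
   entries.  Only when a new expression mentioning register 107 is
   about to be entered does the mismatch 3 != 4 trigger
   remove_invalid_refs, after which reg_in_table[107] becomes 4.
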
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

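/* For illustration (a hypothetical case): entering
   (const (plus (symbol_ref "x") (const_int 4))) also enters
   (symbol_ref "x"), and the two elements point at each other through
   `related_value'.  If some register is later known to hold
   (symbol_ref "x"), an equivalent for the offset form can be found by
   following the chain and re-adding the (const_int 4).  */
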
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;
#endif

/* Insn being scanned.  */

static rtx this_insn;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)

/* The last lookup we did into the reg_hash table.  This allows us
   to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL; if so, we have to rerun jump after CSE to put in
   the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
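
/* For example (with made-up numbers), if pseudo register 110 currently
   has quantity 57, HASH ((reg:SI 110), SImode) folds
   ((unsigned) REG << 7) + 57 into the low HASH_SHIFT bits, so two
   pseudos sharing a quantity always land in the same bucket without
   a call to canon_hash.  */
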
490
491/* Determine whether register number N is considered a fixed register for the
492   purpose of approximating register costs.
493   It is desirable to replace other regs with fixed regs, to reduce need for
494   non-fixed hard regs.
495   A reg wins if it is either the frame pointer or designated as fixed.  */
496#define FIXED_REGNO_P(N)  \
497  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
498   || fixed_regs[N] || global_regs[N])
499
500/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
501   hard registers and pointers into the frame are the cheapest with a cost
502   of 0.  Next come pseudos with a cost of one and other hard registers with
503   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
504
505#define CHEAP_REGNO(N) \
506  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
507   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
508   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
509   || ((N) < FIRST_PSEUDO_REGISTER					\
510       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
511
512#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
513#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
514
/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } *path;
};

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferrable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);
static unsigned canon_hash (rtx, enum machine_mode);
static unsigned canon_hash_string (const char *);
static unsigned safe_hash (rtx, enum machine_mode);
static int exp_equiv_p (rtx, rtx, int, int);
static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx, rtx);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void cse_around_loop (rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static void cse_check_loop_start (rtx, rtx, void *);
static void cse_set_around_loop (rtx, rtx, rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
static void count_reg_usage (rtx, int *, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static struct cse_reg_info * get_cse_reg_info (unsigned int);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    case ADDRESSOF:
      return true;

    default:
      return false;
    }
}

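/* For instance (a hypothetical frame-relative address),
   (plus:SI (reg:SI FRAME_POINTER_REGNUM) (const_int 8)) satisfies
   fixed_base_plus_p: the PLUS case recurses into a REG that is the
   frame pointer.  With an ordinary pseudo as the base it would
   return false.  */
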
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferrable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

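/* A worked example (numbers invented): preferrable (2, MAX_COST, 4, 1)
   returns 1, because a regcost of MAX_COST marks A as extending a hard
   register's lifetime and therefore unwanted despite its lower
   operation cost; preferrable (2, 1, 2, 3) returns -2, since the
   operation costs tie and only then does register pressure decide.  */
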
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

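/* As an example (assuming a target where the truncation is a no-op),
   (subreg:SI (reg:DI 120) 0) costs 0 here: taking the low part of a
   wider register needs no code.  A SUBREG failing these checks falls
   through to twice its rtx_cost.  */
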
814   One use is in cse, to decide which expression to keep in the hash table.
815   Another is in rtl generation, to pick the cheapest way to multiply.
816   Other uses like the latter are expected in the future.  */
817
818int
819rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
820{
821  int i, j;
822  enum rtx_code code;
823  const char *fmt;
824  int total;
825
826  if (x == 0)
827    return 0;
828
829  /* Compute the default costs of certain things.
830     Note that targetm.rtx_costs can override the defaults.  */
831
832  code = GET_CODE (x);
833  switch (code)
834    {
835    case MULT:
836      total = COSTS_N_INSNS (5);
837      break;
838    case DIV:
839    case UDIV:
840    case MOD:
841    case UMOD:
842      total = COSTS_N_INSNS (7);
843      break;
844    case USE:
845      /* Used in loop.c and combine.c as a marker.  */
846      total = 0;
847      break;
848    default:
849      total = COSTS_N_INSNS (1);
850    }
851
852  switch (code)
853    {
854    case REG:
855      return 0;
856
857    case SUBREG:
858      /* If we can't tie these modes, make this expensive.  The larger
859	 the mode, the more expensive it is.  */
860      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
861	return COSTS_N_INSNS (2
862			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
863      break;
864
865    default:
866      if ((*targetm.rtx_costs) (x, code, outer_code, &total))
867	return total;
868      break;
869    }
870
871  /* Sum the costs of the sub-rtx's, plus cost of this operation,
872     which is already in total.  */
873
874  fmt = GET_RTX_FORMAT (code);
875  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
876    if (fmt[i] == 'e')
877      total += rtx_cost (XEXP (x, i), code);
878    else if (fmt[i] == 'E')
879      for (j = 0; j < XVECLEN (x, i); j++)
880	total += rtx_cost (XVECEXP (x, i, j), code);
881
882  return total;
883}
884
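/* For illustration (assuming the target hook does not override the
   defaults): rtx_cost ((plus:SI (reg:SI 100) (const_int 4)), SET)
   starts from COSTS_N_INSNS (1) for the PLUS, adds 0 for the REG, and
   adds whatever the target assigns the CONST_INT; with MULT in place
   of PLUS the starting value would be COSTS_N_INSNS (5).  */
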
/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* The address_cost target hook does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for the cost of various unusual addresses, such as the
     operands of a push instruction.  It is not worthwhile to complicate the
     target hook for such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return (*targetm.address_cost) (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}

static struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->subreg_ticked = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset (reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE that is not
   already held in any register, and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

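/* A sketch of the resulting chain order (registers invented): if
   quantity Q is held by a fixed hard reg, pseudo 130, and a non-fixed
   hard reg, the chain runs fixed hard reg, then pseudo, then
   non-fixed hard reg -- the non-fixed hard regs stay at the tail,
   where they will not be chosen as the canonical replacement.  */
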
1106/* Remove REG from its equivalence class.  */
1107
1108static void
1109delete_reg_equiv (unsigned int reg)
1110{
1111  struct qty_table_elem *ent;
1112  int q = REG_QTY (reg);
1113  int p, n;
1114
1115  /* If invalid, do nothing.  */
1116  if (q == (int) reg)
1117    return;
1118
1119  ent = &qty_table[q];
1120
1121  p = reg_eqv_table[reg].prev;
1122  n = reg_eqv_table[reg].next;
1123
1124  if (n != -1)
1125    reg_eqv_table[n].prev = p;
1126  else
1127    ent->last_reg = p;
1128  if (p != -1)
1129    reg_eqv_table[p].next = n;
1130  else
1131    ent->first_reg = n;
1132
1133  REG_QTY (reg) = reg;
1134}
1135
1136/* Remove any invalid expressions from the hash table
1137   that refer to any of the registers contained in expression X.
1138
1139   Make sure that newly inserted references to those registers
1140   as subexpressions will be considered valid.
1141
1142   mention_regs is not called when a register itself
1143   is being stored in the table.
1144
1145   Return 1 if we have done something that may have changed the hash code
1146   of X.  */
1147
1148static int
1149mention_regs (rtx x)
1150{
1151  enum rtx_code code;
1152  int i, j;
1153  const char *fmt;
1154  int changed = 0;
1155
1156  if (x == 0)
1157    return 0;
1158
1159  code = GET_CODE (x);
1160  if (code == REG)
1161    {
1162      unsigned int regno = REGNO (x);
1163      unsigned int endregno
1164	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1165		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1166      unsigned int i;
1167
1168      for (i = regno; i < endregno; i++)
1169	{
1170	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1171	    remove_invalid_refs (i);
1172
1173	  REG_IN_TABLE (i) = REG_TICK (i);
1174	  SUBREG_TICKED (i) = -1;
1175	}
1176
1177      return 0;
1178    }
1179
1180  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1181     pseudo if they don't use overlapping words.  We handle only pseudos
1182     here for simplicity.  */
1183  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1184      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1185    {
1186      unsigned int i = REGNO (SUBREG_REG (x));
1187
1188      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1189	{
1190	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1191	     the last store to this register really stored into this
1192	     subreg, then remove the memory of this subreg.
1193	     Otherwise, remove any memory of the entire register and
1194	     all its subregs from the table.  */
1195	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1196	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1197	    remove_invalid_refs (i);
1198	  else
1199	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1200	}
1201
1202      REG_IN_TABLE (i) = REG_TICK (i);
1203      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1204      return 0;
1205    }
1206
1207  /* If X is a comparison or a COMPARE and either operand is a register
1208     that does not have a quantity, give it one.  This is so that a later
1209     call to record_jump_equiv won't cause X to be assigned a different
1210     hash code and not found in the table after that call.
1211
1212     It is not necessary to do this here, since rehash_using_reg can
1213     fix up the table later, but doing this here eliminates the need to
1214     call that expensive function in the most common case where the only
1215     use of the register is in the comparison.  */
1216
1217  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1218    {
1219      if (GET_CODE (XEXP (x, 0)) == REG
1220	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1221	if (insert_regs (XEXP (x, 0), NULL, 0))
1222	  {
1223	    rehash_using_reg (XEXP (x, 0));
1224	    changed = 1;
1225	  }
1226
1227      if (GET_CODE (XEXP (x, 1)) == REG
1228	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1229	if (insert_regs (XEXP (x, 1), NULL, 0))
1230	  {
1231	    rehash_using_reg (XEXP (x, 1));
1232	    changed = 1;
1233	  }
1234    }
1235
1236  fmt = GET_RTX_FORMAT (code);
1237  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1238    if (fmt[i] == 'e')
1239      changed |= mention_regs (XEXP (x, i));
1240    else if (fmt[i] == 'E')
1241      for (j = 0; j < XVECLEN (x, i); j++)
1242	changed |= mention_regs (XVECEXP (x, i, j));
1243
1244  return changed;
1245}
1246
1247/* Update the register quantities for inserting X into the hash table
1248   with a value equivalent to CLASSP.
1249   (If the class does not contain a REG, it is irrelevant.)
1250   If MODIFIED is nonzero, X is a destination; it is being modified.
1251   Note that delete_reg_equiv should be called on a register
1252   before insert_regs is done on that register with MODIFIED != 0.
1253
1254   Nonzero value means that elements of reg_qty have changed
1255   so X's hash code may be different.  */
1256
1257static int
1258insert_regs (rtx x, struct table_elt *classp, int modified)
1259{
1260  if (GET_CODE (x) == REG)
1261    {
1262      unsigned int regno = REGNO (x);
1263      int qty_valid;
1264
1265      /* If REGNO is in the equivalence table already but is of the
1266	 wrong mode for that equivalence, don't do anything here.  */
1267
1268      qty_valid = REGNO_QTY_VALID_P (regno);
1269      if (qty_valid)
1270	{
1271	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1272
1273	  if (ent->mode != GET_MODE (x))
1274	    return 0;
1275	}
1276
1277      if (modified || ! qty_valid)
1278	{
1279	  if (classp)
1280	    for (classp = classp->first_same_value;
1281		 classp != 0;
1282		 classp = classp->next_same_value)
1283	      if (GET_CODE (classp->exp) == REG
1284		  && GET_MODE (classp->exp) == GET_MODE (x))
1285		{
1286		  make_regs_eqv (regno, REGNO (classp->exp));
1287		  return 1;
1288		}
1289
1290	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1291	     than REG_IN_TABLE to find out if there was only a single preceding
1292	     invalidation - for the SUBREG - or another one, which would be
1293	     for the full register.  However, if we find here that REG_TICK
1294	     indicates that the register is invalid, it means that it has
1295	     been invalidated in a separate operation.  The SUBREG might be used
1296	     now (then this is a recursive call), or we might use the full REG
1297	     now and a SUBREG of it later.  So bump up REG_TICK so that
1298	     mention_regs will do the right thing.  */
1299	  if (! modified
1300	      && REG_IN_TABLE (regno) >= 0
1301	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1302	    REG_TICK (regno)++;
1303	  make_new_qty (regno, GET_MODE (x));
1304	  return 1;
1305	}
1306
1307      return 0;
1308    }
1309
1310  /* If X is a SUBREG, we will likely be inserting the inner register in the
1311     table.  If that register doesn't have an assigned quantity number at
1312     this point but does later, the insertion that we will be doing now will
1313     not be accessible because its hash code will have changed.  So assign
1314     a quantity number now.  */
1315
1316  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1317	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1318    {
1319      insert_regs (SUBREG_REG (x), NULL, 0);
1320      mention_regs (x);
1321      return 1;
1322    }
1323  else
1324    return mention_regs (x);
1325}
1326
1327/* Look in or update the hash table.  */
1328
1329/* Remove table element ELT from use in the table.
1330   HASH is its hash code, made using the HASH macro.
1331   It's an argument because often that is known in advance
1332   and we save much time not recomputing it.  */
1333
1334static void
1335remove_from_table (struct table_elt *elt, unsigned int hash)
1336{
1337  if (elt == 0)
1338    return;
1339
1340  /* Mark this element as removed.  See cse_insn.  */
1341  elt->first_same_value = 0;
1342
1343  /* Remove the table element from its equivalence class.  */
1344
1345  {
1346    struct table_elt *prev = elt->prev_same_value;
1347    struct table_elt *next = elt->next_same_value;
1348
1349    if (next)
1350      next->prev_same_value = prev;
1351
1352    if (prev)
1353      prev->next_same_value = next;
1354    else
1355      {
1356	struct table_elt *newfirst = next;
1357	while (next)
1358	  {
1359	    next->first_same_value = newfirst;
1360	    next = next->next_same_value;
1361	  }
1362      }
1363  }
1364
1365  /* Remove the table element from its hash bucket.  */
1366
1367  {
1368    struct table_elt *prev = elt->prev_same_hash;
1369    struct table_elt *next = elt->next_same_hash;
1370
1371    if (next)
1372      next->prev_same_hash = prev;
1373
1374    if (prev)
1375      prev->next_same_hash = next;
1376    else if (table[hash] == elt)
1377      table[hash] = next;
1378    else
1379      {
1380	/* This entry is not in the proper hash bucket.  This can happen
1381	   when two classes were merged by `merge_equiv_classes'.  Search
1382	   for the hash bucket that it heads.  This happens only very
1383	   rarely, so the cost is acceptable.  */
1384	for (hash = 0; hash < HASH_SIZE; hash++)
1385	  if (table[hash] == elt)
1386	    table[hash] = next;
1387      }
1388  }
1389
1390  /* Remove the table element from its related-value circular chain.  */
1391
1392  if (elt->related_value != 0 && elt->related_value != elt)
1393    {
1394      struct table_elt *p = elt->related_value;
1395
1396      while (p->related_value != elt)
1397	p = p->related_value;
1398      p->related_value = elt->related_value;
1399      if (p->related_value == p)
1400	p->related_value = 0;
1401    }
1402
1403  /* Now add it to the free element chain.  */
1404  elt->next_same_hash = free_element_chain;
1405  free_element_chain = elt;
1406}
1407
1408/* Look up X in the hash table and return its table element,
1409   or 0 if X is not in the table.
1410
1411   MODE is the machine-mode of X, or if X is an integer constant
1412   with VOIDmode then MODE is the mode with which X will be used.
1413
1414   Here we are satisfied to find an expression whose tree structure
1415   looks like X.  */
1416
1417static struct table_elt *
1418lookup (rtx x, unsigned int hash, enum machine_mode mode)
1419{
1420  struct table_elt *p;
1421
1422  for (p = table[hash]; p; p = p->next_same_hash)
1423    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1424			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1425      return p;
1426
1427  return 0;
1428}
1429
1430/* Like `lookup' but don't care whether the table element uses invalid regs.
1431   Also ignore discrepancies in the machine mode of a register.  */
1432
1433static struct table_elt *
1434lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1435{
1436  struct table_elt *p;
1437
1438  if (GET_CODE (x) == REG)
1439    {
1440      unsigned int regno = REGNO (x);
1441
1442      /* Don't check the machine mode when comparing registers;
1443	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1444      for (p = table[hash]; p; p = p->next_same_hash)
1445	if (GET_CODE (p->exp) == REG
1446	    && REGNO (p->exp) == regno)
1447	  return p;
1448    }
1449  else
1450    {
1451      for (p = table[hash]; p; p = p->next_same_hash)
1452	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1453	  return p;
1454    }
1455
1456  return 0;
1457}
1458
1459/* Look for an expression equivalent to X and with code CODE.
1460   If one is found, return that expression.  */
1461
1462static rtx
1463lookup_as_function (rtx x, enum rtx_code code)
1464{
1465  struct table_elt *p
1466    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1467
1468  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1469     long as we are narrowing.  So if we looked in vain for a mode narrower
1470     than word_mode before, look for word_mode now.  */
1471  if (p == 0 && code == CONST_INT
1472      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1473    {
1474      x = copy_rtx (x);
1475      PUT_MODE (x, word_mode);
1476      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1477    }
1478
1479  if (p == 0)
1480    return 0;
1481
1482  for (p = p->first_same_value; p; p = p->next_same_value)
1483    if (GET_CODE (p->exp) == code
1484	/* Make sure this is a valid entry in the table.  */
1485	&& exp_equiv_p (p->exp, p->exp, 1, 0))
1486      return p->exp;
1487
1488  return 0;
1489}
1490
1491/* Insert X in the hash table, assuming HASH is its hash code
1492   and CLASSP is an element of the class it should go in
1493   (or 0 if a new class should be made).
1494   It is inserted at the proper position to keep the class in
1495   the order cheapest first.
1496
1497   MODE is the machine-mode of X, or if X is an integer constant
1498   with VOIDmode then MODE is the mode with which X will be used.
1499
1500   For elements of equal cheapness, the most recent one
1501   goes in front, except that the first element in the list
1502   remains first unless a cheaper element is added.  The order of
1503   pseudo-registers does not matter, as canon_reg will be called to
1504   find the cheapest when a register is retrieved from the table.
1505
1506   The in_memory field in the hash table element is set to 0.
1507   The caller must set it nonzero if appropriate.
1508
1509   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1510   and if insert_regs returns a nonzero value
1511   you must then recompute its hash code before calling here.
1512
1513   If necessary, update table showing constant values of quantities.  */
1514
1515#define CHEAPER(X, Y) \
1516 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1517
1518static struct table_elt *
1519insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1520{
1521  struct table_elt *elt;
1522
1523  /* If X is a register and we haven't made a quantity for it,
1524     something is wrong.  */
1525  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1526    abort ();
1527
1528  /* If X is a hard register, show it is being put in the table.  */
1529  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1530    {
1531      unsigned int regno = REGNO (x);
1532      unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1533      unsigned int i;
1534
1535      for (i = regno; i < endregno; i++)
1536	SET_HARD_REG_BIT (hard_regs_in_table, i);
1537    }
1538
1539  /* Put an element for X into the right hash bucket.  */
1540
1541  elt = free_element_chain;
1542  if (elt)
1543    free_element_chain = elt->next_same_hash;
1544  else
1545    {
1546      n_elements_made++;
1547      elt = xmalloc (sizeof (struct table_elt));
1548    }
1549
1550  elt->exp = x;
1551  elt->canon_exp = NULL_RTX;
1552  elt->cost = COST (x);
1553  elt->regcost = approx_reg_cost (x);
1554  elt->next_same_value = 0;
1555  elt->prev_same_value = 0;
1556  elt->next_same_hash = table[hash];
1557  elt->prev_same_hash = 0;
1558  elt->related_value = 0;
1559  elt->in_memory = 0;
1560  elt->mode = mode;
1561  elt->is_const = (CONSTANT_P (x)
1562		   /* GNU C++ takes advantage of this for `this'
1563		      (and other const values).  */
1564		   || (GET_CODE (x) == REG
1565		       && RTX_UNCHANGING_P (x)
1566		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1567		   || fixed_base_plus_p (x));
1568
1569  if (table[hash])
1570    table[hash]->prev_same_hash = elt;
1571  table[hash] = elt;
1572
1573  /* Put it into the proper value-class.  */
1574  if (classp)
1575    {
1576      classp = classp->first_same_value;
1577      if (CHEAPER (elt, classp))
1578	/* Insert at the head of the class.  */
1579	{
1580	  struct table_elt *p;
1581	  elt->next_same_value = classp;
1582	  classp->prev_same_value = elt;
1583	  elt->first_same_value = elt;
1584
1585	  for (p = classp; p; p = p->next_same_value)
1586	    p->first_same_value = elt;
1587	}
1588      else
1589	{
1590	  /* Insert not at head of the class.  */
1591	  /* Put it after the last element cheaper than X.  */
1592	  struct table_elt *p, *next;
1593
1594	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1595	       p = next);
1596
1597	  /* Put it after P and before NEXT.  */
1598	  elt->next_same_value = next;
1599	  if (next)
1600	    next->prev_same_value = elt;
1601
1602	  elt->prev_same_value = p;
1603	  p->next_same_value = elt;
1604	  elt->first_same_value = classp;
1605	}
1606    }
1607  else
1608    elt->first_same_value = elt;
1609
1610  /* If this is a constant being set equivalent to a register or a register
1611     being set equivalent to a constant, note the constant equivalence.
1612
1613     If this is a constant, it cannot be equivalent to a different constant,
1614     and a constant is the only thing that can be cheaper than a register.  So
1615     we know the register is the head of the class (before the constant was
1616     inserted).
1617
1618     If this is a register that is not already known equivalent to a
1619     constant, we must check the entire class.
1620
1621     If this is a register that is already known equivalent to an insn,
1622     update the qty's `const_insn' to show that `this_insn' is the latest
1623     insn making that quantity equivalent to the constant.  */
1624
1625  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1626      && GET_CODE (x) != REG)
1627    {
1628      int exp_q = REG_QTY (REGNO (classp->exp));
1629      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1630
1631      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1632      exp_ent->const_insn = this_insn;
1633    }
1634
1635  else if (GET_CODE (x) == REG
1636	   && classp
1637	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1638	   && ! elt->is_const)
1639    {
1640      struct table_elt *p;
1641
1642      for (p = classp; p != 0; p = p->next_same_value)
1643	{
1644	  if (p->is_const && GET_CODE (p->exp) != REG)
1645	    {
1646	      int x_q = REG_QTY (REGNO (x));
1647	      struct qty_table_elem *x_ent = &qty_table[x_q];
1648
1649	      x_ent->const_rtx
1650		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
1651	      x_ent->const_insn = this_insn;
1652	      break;
1653	    }
1654	}
1655    }
1656
1657  else if (GET_CODE (x) == REG
1658	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1659	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1660    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1661
1662  /* If this is a constant with symbolic value,
1663     and it has a term with an explicit integer value,
1664     link it up with related expressions.  */
1665  if (GET_CODE (x) == CONST)
1666    {
1667      rtx subexp = get_related_value (x);
1668      unsigned subhash;
1669      struct table_elt *subelt, *subelt_prev;
1670
1671      if (subexp != 0)
1672	{
1673	  /* Get the integer-free subexpression in the hash table.  */
1674	  subhash = safe_hash (subexp, mode) & HASH_MASK;
1675	  subelt = lookup (subexp, subhash, mode);
1676	  if (subelt == 0)
1677	    subelt = insert (subexp, NULL, subhash, mode);
1678	  /* Initialize SUBELT's circular chain if it has none.  */
1679	  if (subelt->related_value == 0)
1680	    subelt->related_value = subelt;
1681	  /* Find the element in the circular chain that precedes SUBELT.  */
1682	  subelt_prev = subelt;
1683	  while (subelt_prev->related_value != subelt)
1684	    subelt_prev = subelt_prev->related_value;
1685	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1686	     This way the element that follows SUBELT is the oldest one.  */
1687	  elt->related_value = subelt_prev->related_value;
1688	  subelt_prev->related_value = elt;
1689	}
1690    }
1691
1692  return elt;
1693}
1694
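/* A minimal sketch of the calling sequence documented above insert;
   EXP, CLASSP, HASH, ELT and MODE stand for whatever the caller has in
   hand.  Register EXP's registers first, rehash if insert_regs asks
   for it, and only then insert.  The same pattern appears in
   merge_equiv_classes below.  */
#if 0
  hash = HASH (exp, mode);
  if (insert_regs (exp, classp, 1))
    {
      rehash_using_reg (exp);
      hash = HASH (exp, mode);
    }
  elt = insert (exp, classp, hash, mode);
#endif
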
1695/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1696   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1697   the two classes equivalent.
1698
1699   CLASS1 will be the surviving class; CLASS2 should not be used after this
1700   call.
1701
1702   Any invalid entries in CLASS2 will not be copied.  */
1703
1704static void
1705merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1706{
1707  struct table_elt *elt, *next, *new;
1708
1709  /* Ensure we start with the head of the classes.  */
1710  class1 = class1->first_same_value;
1711  class2 = class2->first_same_value;
1712
1713  /* If they were already equal, forget it.  */
1714  if (class1 == class2)
1715    return;
1716
1717  for (elt = class2; elt; elt = next)
1718    {
1719      unsigned int hash;
1720      rtx exp = elt->exp;
1721      enum machine_mode mode = elt->mode;
1722
1723      next = elt->next_same_value;
1724
1725      /* Remove old entry, make a new one in CLASS1's class.
1726	 Don't do this for invalid entries as we cannot find their
1727	 hash code (it also isn't necessary).  */
1728      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1729	{
1730	  bool need_rehash = false;
1731
1732	  hash_arg_in_memory = 0;
1733	  hash = HASH (exp, mode);
1734
1735	  if (GET_CODE (exp) == REG)
1736	    {
1737	      need_rehash = (unsigned) REG_QTY (REGNO (exp)) != REGNO (exp);
1738	      delete_reg_equiv (REGNO (exp));
1739	    }
1740
1741	  remove_from_table (elt, hash);
1742
1743	  if (insert_regs (exp, class1, 0) || need_rehash)
1744	    {
1745	      rehash_using_reg (exp);
1746	      hash = HASH (exp, mode);
1747	    }
1748	  new = insert (exp, class1, hash, mode);
1749	  new->in_memory = hash_arg_in_memory;
1750	}
1751    }
1752}
1753
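/* For example, once an insn such as (set (reg 66) (reg 65)) has been
   processed and each register heads its own class, the pass can record
   that both classes describe one value:

	merge_equiv_classes (class_of_reg_65, class_of_reg_66);

   after which only the class containing (reg 65) survives.  (Register
   numbers and variable names invented for illustration.)  */
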
1754/* Flush the entire hash table.  */
1755
1756static void
1757flush_hash_table (void)
1758{
1759  int i;
1760  struct table_elt *p;
1761
1762  for (i = 0; i < HASH_SIZE; i++)
1763    for (p = table[i]; p; p = table[i])
1764      {
1765	/* Note that invalidate can remove elements
1766	   after P in the current hash chain.  */
1767	if (GET_CODE (p->exp) == REG)
1768	  invalidate (p->exp, p->mode);
1769	else
1770	  remove_from_table (p, i);
1771      }
1772}
1773
1774/* Function called for each rtx to check whether a true dependence exists.  */
1775struct check_dependence_data
1776{
1777  enum machine_mode mode;
1778  rtx exp;
1779  rtx addr;
1780};
1781
1782static int
1783check_dependence (rtx *x, void *data)
1784{
1785  struct check_dependence_data *d = (struct check_dependence_data *) data;
1786  if (*x && GET_CODE (*x) == MEM)
1787    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1788				  cse_rtx_varies_p);
1789  else
1790    return 0;
1791}
1792
1793/* Remove from the hash table, or mark as invalid, all expressions whose
1794   values could be altered by storing in X.  X is a register, a subreg, or
1795   a memory reference with nonvarying address (because, when a memory
1796   reference with a varying address is stored in, all memory references are
1797   removed by invalidate_memory so specific invalidation is superfluous).
1798   FULL_MODE, if not VOIDmode, indicates that this much should be
1799   invalidated instead of just the amount indicated by the mode of X.  This
1800   is only used for bitfield stores into memory.
1801
1802   A nonvarying address may be just a register or just a symbol reference,
1803   or it may be either of those plus a numeric offset.  */
1804
1805static void
1806invalidate (rtx x, enum machine_mode full_mode)
1807{
1808  int i;
1809  struct table_elt *p;
1810  rtx addr;
1811
1812  switch (GET_CODE (x))
1813    {
1814    case REG:
1815      {
1816	/* If X is a register, dependencies on its contents are recorded
1817	   through the qty number mechanism.  Just change the qty number of
1818	   the register, mark it as invalid for expressions that refer to it,
1819	   and remove it itself.  */
1820	unsigned int regno = REGNO (x);
1821	unsigned int hash = HASH (x, GET_MODE (x));
1822
1823	/* Remove REGNO from any quantity list it might be on and indicate
1824	   that its value might have changed.  If it is a pseudo, remove its
1825	   entry from the hash table.
1826
1827	   For a hard register, we do the first two actions above for any
1828	   additional hard registers corresponding to X.  Then, if any of these
1829	   registers are in the table, we must remove any REG entries that
1830	   overlap these registers.  */
1831
1832	delete_reg_equiv (regno);
1833	REG_TICK (regno)++;
1834	SUBREG_TICKED (regno) = -1;
1835
1836	if (regno >= FIRST_PSEUDO_REGISTER)
1837	  {
1838	    /* Because a register can be referenced in more than one mode,
1839	       we might have to remove more than one table entry.  */
1840	    struct table_elt *elt;
1841
1842	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1843	      remove_from_table (elt, hash);
1844	  }
1845	else
1846	  {
1847	    HOST_WIDE_INT in_table
1848	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1849	    unsigned int endregno
1850	      = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1851	    unsigned int tregno, tendregno, rn;
1852	    struct table_elt *p, *next;
1853
1854	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1855
1856	    for (rn = regno + 1; rn < endregno; rn++)
1857	      {
1858		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1859		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1860		delete_reg_equiv (rn);
1861		REG_TICK (rn)++;
1862		SUBREG_TICKED (rn) = -1;
1863	      }
1864
1865	    if (in_table)
1866	      for (hash = 0; hash < HASH_SIZE; hash++)
1867		for (p = table[hash]; p; p = next)
1868		  {
1869		    next = p->next_same_hash;
1870
1871		    if (GET_CODE (p->exp) != REG
1872			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1873		      continue;
1874
1875		    tregno = REGNO (p->exp);
1876		    tendregno
1877		      = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1878		    if (tendregno > regno && tregno < endregno)
1879		      remove_from_table (p, hash);
1880		  }
1881	  }
1882      }
1883      return;
1884
1885    case SUBREG:
1886      invalidate (SUBREG_REG (x), VOIDmode);
1887      return;
1888
1889    case PARALLEL:
1890      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1891	invalidate (XVECEXP (x, 0, i), VOIDmode);
1892      return;
1893
1894    case EXPR_LIST:
1895      /* This is part of a disjoint return value; extract the location in
1896	 question ignoring the offset.  */
1897      invalidate (XEXP (x, 0), VOIDmode);
1898      return;
1899
1900    case MEM:
1901      addr = canon_rtx (get_addr (XEXP (x, 0)));
1902      /* Calculate the canonical version of X here so that
1903	 true_dependence doesn't generate new RTL for X on each call.  */
1904      x = canon_rtx (x);
1905
1906      /* Remove all hash table elements that refer to overlapping pieces of
1907	 memory.  */
1908      if (full_mode == VOIDmode)
1909	full_mode = GET_MODE (x);
1910
1911      for (i = 0; i < HASH_SIZE; i++)
1912	{
1913	  struct table_elt *next;
1914
1915	  for (p = table[i]; p; p = next)
1916	    {
1917	      next = p->next_same_hash;
1918	      if (p->in_memory)
1919		{
1920		  struct check_dependence_data d;
1921
1922		  /* Just canonicalize the expression once;
1923		     otherwise each time we call invalidate
1924		     true_dependence will canonicalize the
1925		     expression again.  */
1926		  if (!p->canon_exp)
1927		    p->canon_exp = canon_rtx (p->exp);
1928		  d.exp = x;
1929		  d.addr = addr;
1930		  d.mode = full_mode;
1931		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1932		    remove_from_table (p, i);
1933		}
1934	    }
1935	}
1936      return;
1937
1938    default:
1939      abort ();
1940    }
1941}
1942
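/* As an illustration, after scanning a store the pass forgets
   everything the destination could alter, e.g.

	invalidate (SET_DEST (PATTERN (insn)), VOIDmode);

   For a hard register pair such as (reg:DI 0) on a 32-bit target, the
   REG case above bumps REG_TICK for both hard registers and removes
   any table entry overlapping either of them.  (Hypothetical insn and
   target.)  */
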
1943/* Remove all expressions that refer to register REGNO,
1944   since they are already invalid, and we are about to
1945   mark that register valid again and don't want the old
1946   expressions to reappear as valid.  */
1947
1948static void
1949remove_invalid_refs (unsigned int regno)
1950{
1951  unsigned int i;
1952  struct table_elt *p, *next;
1953
1954  for (i = 0; i < HASH_SIZE; i++)
1955    for (p = table[i]; p; p = next)
1956      {
1957	next = p->next_same_hash;
1958	if (GET_CODE (p->exp) != REG
1959	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1960	  remove_from_table (p, i);
1961      }
1962}
1963
1964/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1965   and mode MODE.  */
1966static void
1967remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1968			    enum machine_mode mode)
1969{
1970  unsigned int i;
1971  struct table_elt *p, *next;
1972  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1973
1974  for (i = 0; i < HASH_SIZE; i++)
1975    for (p = table[i]; p; p = next)
1976      {
1977	rtx exp = p->exp;
1978	next = p->next_same_hash;
1979
1980	if (GET_CODE (exp) != REG
1981	    && (GET_CODE (exp) != SUBREG
1982		|| GET_CODE (SUBREG_REG (exp)) != REG
1983		|| REGNO (SUBREG_REG (exp)) != regno
1984		|| (((SUBREG_BYTE (exp)
1985		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1986		    && SUBREG_BYTE (exp) <= end))
1987	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1988	  remove_from_table (p, i);
1989      }
1990}
1991
1992/* Recompute the hash codes of any valid entries in the hash table that
1993   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1994
1995   This is called when we make a jump equivalence.  */
1996
1997static void
1998rehash_using_reg (rtx x)
1999{
2000  unsigned int i;
2001  struct table_elt *p, *next;
2002  unsigned hash;
2003
2004  if (GET_CODE (x) == SUBREG)
2005    x = SUBREG_REG (x);
2006
2007  /* If X is not a register or if the register is known not to be in any
2008     valid entries in the table, we have no work to do.  */
2009
2010  if (GET_CODE (x) != REG
2011      || REG_IN_TABLE (REGNO (x)) < 0
2012      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2013    return;
2014
2015  /* Scan all hash chains looking for valid entries that mention X.
2016     If we find one and it is in the wrong hash chain, move it.  */
2017
2018  for (i = 0; i < HASH_SIZE; i++)
2019    for (p = table[i]; p; p = next)
2020      {
2021	next = p->next_same_hash;
2022	if (reg_mentioned_p (x, p->exp)
2023	    && exp_equiv_p (p->exp, p->exp, 1, 0)
2024	    && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2025	  {
2026	    if (p->next_same_hash)
2027	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2028
2029	    if (p->prev_same_hash)
2030	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2031	    else
2032	      table[i] = p->next_same_hash;
2033
2034	    p->next_same_hash = table[hash];
2035	    p->prev_same_hash = 0;
2036	    if (table[hash])
2037	      table[hash]->prev_same_hash = p;
2038	    table[hash] = p;
2039	  }
2040      }
2041}
2042
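/* The rehash is needed because canon_hash (below) hashes a REG through
   its quantity number rather than its register number: once a jump
   equivalence changes the quantity of (reg 65), the hash of every
   valid expression mentioning that register can change with it.
   (Register number invented for illustration.)  */
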
2043/* Remove from the hash table any expression that is a call-clobbered
2044   register.  Also update their TICK values.  */
2045
2046static void
2047invalidate_for_call (void)
2048{
2049  unsigned int regno, endregno;
2050  unsigned int i;
2051  unsigned hash;
2052  struct table_elt *p, *next;
2053  int in_table = 0;
2054
2055  /* Go through all the hard registers.  For each that is clobbered in
2056     a CALL_INSN, remove the register from quantity chains and update
2057     reg_tick if defined.  Also see if any of these registers is currently
2058     in the table.  */
2059
2060  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2061    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2062      {
2063	delete_reg_equiv (regno);
2064	if (REG_TICK (regno) >= 0)
2065	  {
2066	    REG_TICK (regno)++;
2067	    SUBREG_TICKED (regno) = -1;
2068	  }
2069
2070	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2071      }
2072
2073  /* In the case where we have no call-clobbered hard registers in the
2074     table, we are done.  Otherwise, scan the table and remove any
2075     entry that overlaps a call-clobbered register.  */
2076
2077  if (in_table)
2078    for (hash = 0; hash < HASH_SIZE; hash++)
2079      for (p = table[hash]; p; p = next)
2080	{
2081	  next = p->next_same_hash;
2082
2083	  if (GET_CODE (p->exp) != REG
2084	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2085	    continue;
2086
2087	  regno = REGNO (p->exp);
2088	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2089
2090	  for (i = regno; i < endregno; i++)
2091	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2092	      {
2093		remove_from_table (p, hash);
2094		break;
2095	      }
2096	}
2097}
2098
2099/* Given an expression X of type CONST,
2100   and ELT which is its table entry (or 0 if it
2101   is not in the hash table),
2102   return an alternate expression for X as a register plus integer.
2103   If none can be found, return 0.  */
2104
2105static rtx
2106use_related_value (rtx x, struct table_elt *elt)
2107{
2108  struct table_elt *relt = 0;
2109  struct table_elt *p, *q;
2110  HOST_WIDE_INT offset;
2111
2112  /* First, is there anything related known?
2113     If we have a table element, we can tell from that.
2114     Otherwise, must look it up.  */
2115
2116  if (elt != 0 && elt->related_value != 0)
2117    relt = elt;
2118  else if (elt == 0 && GET_CODE (x) == CONST)
2119    {
2120      rtx subexp = get_related_value (x);
2121      if (subexp != 0)
2122	relt = lookup (subexp,
2123		       safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2124		       GET_MODE (subexp));
2125    }
2126
2127  if (relt == 0)
2128    return 0;
2129
2130  /* Search all related table entries for one that has an
2131     equivalent register.  */
2132
2133  p = relt;
2134  while (1)
2135    {
2136      /* This loop is strange in that it is executed in two different cases.
2137	 The first is when X is already in the table.  Then it is searching
2138	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2139	 X is not in the table.  Then RELT points to a class for the related
2140	 value.
2141
2142	 Ensure that, whatever case we are in, we ignore classes that have
2143	 the same value as X.  */
2144
2145      if (rtx_equal_p (x, p->exp))
2146	q = 0;
2147      else
2148	for (q = p->first_same_value; q; q = q->next_same_value)
2149	  if (GET_CODE (q->exp) == REG)
2150	    break;
2151
2152      if (q)
2153	break;
2154
2155      p = p->related_value;
2156
2157      /* We went all the way around, so there is nothing to be found.
2158	 Alternatively, perhaps RELT was in the table for some other reason
2159	 and it has no related values recorded.  */
2160      if (p == relt || p == 0)
2161	break;
2162    }
2163
2164  if (q == 0)
2165    return 0;
2166
2167  offset = (get_integer_term (x) - get_integer_term (p->exp));
2168  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2169  return plus_constant (q->exp, offset);
2170}
2171
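/* A worked example of the above, with invented operands: if X is

	(const (plus (symbol_ref "tbl") (const_int 16)))

   and the table records that (reg 7) holds

	(const (plus (symbol_ref "tbl") (const_int 4))),

   the two entries share the integer-free term (symbol_ref "tbl") and
   therefore sit on one related_value chain; OFFSET is 16 - 4 = 12 and
   the result is (plus (reg 7) (const_int 12)).  */
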
2172/* Hash a string.  Just add its bytes up.  */
2173static inline unsigned
2174canon_hash_string (const char *ps)
2175{
2176  unsigned hash = 0;
2177  const unsigned char *p = (const unsigned char *) ps;
2178
2179  if (p)
2180    while (*p)
2181      hash += *p++;
2182
2183  return hash;
2184}
2185
2186/* Hash an rtx.  We are careful to make sure the value is never negative.
2187   Equivalent registers hash identically.
2188   MODE is used in hashing for CONST_INTs only;
2189   otherwise the mode of X is used.
2190
2191   Store 1 in do_not_record if any subexpression is volatile.
2192
2193   Store 1 in hash_arg_in_memory if X contains a MEM rtx
2194   which does not have the RTX_UNCHANGING_P bit set.
2195
2196   Note that cse_insn knows that the hash code of a MEM expression
2197   is just (int) MEM plus the hash code of the address.  */
2198
2199static unsigned
2200canon_hash (rtx x, enum machine_mode mode)
2201{
2202  int i, j;
2203  unsigned hash = 0;
2204  enum rtx_code code;
2205  const char *fmt;
2206
2207  /* repeat is used to turn tail-recursion into iteration.  */
2208 repeat:
2209  if (x == 0)
2210    return hash;
2211
2212  code = GET_CODE (x);
2213  switch (code)
2214    {
2215    case REG:
2216      {
2217	unsigned int regno = REGNO (x);
2218	bool record;
2219
2220	/* On some machines, we can't record any non-fixed hard register,
2221	   because extending its life will cause reload problems.  We
2222	   consider ap, fp, sp, gp to be fixed for this purpose.
2223
2224	   We also consider CCmode registers to be fixed for this purpose;
2225	   failure to do so leads to failure to simplify 0<100 type of
2226	   conditionals.
2227
2228	   On all machines, we can't record any global registers.
2229	   Nor should we record any register that is in a small
2230	   class, as defined by CLASS_LIKELY_SPILLED_P.  */
2231
2232	if (regno >= FIRST_PSEUDO_REGISTER)
2233	  record = true;
2234	else if (x == frame_pointer_rtx
2235		 || x == hard_frame_pointer_rtx
2236		 || x == arg_pointer_rtx
2237		 || x == stack_pointer_rtx
2238		 || x == pic_offset_table_rtx)
2239	  record = true;
2240	else if (global_regs[regno])
2241	  record = false;
2242	else if (fixed_regs[regno])
2243	  record = true;
2244	else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2245	  record = true;
2246	else if (SMALL_REGISTER_CLASSES)
2247	  record = false;
2248	else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2249	  record = false;
2250	else
2251	  record = true;
2252
2253	if (!record)
2254	  {
2255	    do_not_record = 1;
2256	    return 0;
2257	  }
2258
2259	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2260	return hash;
2261      }
2262
2263    /* We handle SUBREG of a REG specially because the underlying
2264       reg changes its hash value with every value change; we don't
2265       want to have to forget unrelated subregs when one subreg changes.  */
2266    case SUBREG:
2267      {
2268	if (GET_CODE (SUBREG_REG (x)) == REG)
2269	  {
2270	    hash += (((unsigned) SUBREG << 7)
2271		     + REGNO (SUBREG_REG (x))
2272		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2273	    return hash;
2274	  }
2275	break;
2276      }
2277
2278    case CONST_INT:
2279      {
2280	unsigned HOST_WIDE_INT tem = INTVAL (x);
2281	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2282	return hash;
2283      }
2284
2285    case CONST_DOUBLE:
2286      /* This is like the general case, except that it only counts
2287	 the integers representing the constant.  */
2288      hash += (unsigned) code + (unsigned) GET_MODE (x);
2289      if (GET_MODE (x) != VOIDmode)
2290	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2291      else
2292	hash += ((unsigned) CONST_DOUBLE_LOW (x)
2293		 + (unsigned) CONST_DOUBLE_HIGH (x));
2294      return hash;
2295
2296    case CONST_VECTOR:
2297      {
2298	int units;
2299	rtx elt;
2300
2301	units = CONST_VECTOR_NUNITS (x);
2302
2303	for (i = 0; i < units; ++i)
2304	  {
2305	    elt = CONST_VECTOR_ELT (x, i);
2306	    hash += canon_hash (elt, GET_MODE (elt));
2307	  }
2308
2309	return hash;
2310      }
2311
2312      /* Assume there is only one rtx object for any given label.  */
2313    case LABEL_REF:
2314      hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2315      return hash;
2316
2317    case SYMBOL_REF:
2318      hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2319      return hash;
2320
2321    case MEM:
2322      /* We don't record if marked volatile or if BLKmode since we don't
2323	 know the size of the move.  */
2324      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2325	{
2326	  do_not_record = 1;
2327	  return 0;
2328	}
2329      if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2330	hash_arg_in_memory = 1;
2331
2332      /* Now that we have already found this special case,
2333	 might as well speed it up as much as possible.  */
2334      hash += (unsigned) MEM;
2335      x = XEXP (x, 0);
2336      goto repeat;
2337
2338    case USE:
2339      /* A USE that mentions non-volatile memory needs special
2340	 handling since the MEM may be BLKmode which normally
2341	 prevents an entry from being made.  Pure calls are
2342	 marked by a USE which mentions BLKmode memory.  */
2343      if (GET_CODE (XEXP (x, 0)) == MEM
2344	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2345	{
2346	  hash += (unsigned) USE;
2347	  x = XEXP (x, 0);
2348
2349	  if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2350	    hash_arg_in_memory = 1;
2351
2352	  /* Now that we have already found this special case,
2353	     might as well speed it up as much as possible.  */
2354	  hash += (unsigned) MEM;
2355	  x = XEXP (x, 0);
2356	  goto repeat;
2357	}
2358      break;
2359
2360    case PRE_DEC:
2361    case PRE_INC:
2362    case POST_DEC:
2363    case POST_INC:
2364    case PRE_MODIFY:
2365    case POST_MODIFY:
2366    case PC:
2367    case CC0:
2368    case CALL:
2369    case UNSPEC_VOLATILE:
2370      do_not_record = 1;
2371      return 0;
2372
2373    case ASM_OPERANDS:
2374      if (MEM_VOLATILE_P (x))
2375	{
2376	  do_not_record = 1;
2377	  return 0;
2378	}
2379      else
2380	{
2381	  /* We don't want to take the filename and line into account.  */
2382	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2383	    + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2384	    + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2385	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2386
2387	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2388	    {
2389	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2390		{
2391		  hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2392				       GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2393			   + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2394						(x, i)));
2395		}
2396
2397	      hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2398	      x = ASM_OPERANDS_INPUT (x, 0);
2399	      mode = GET_MODE (x);
2400	      goto repeat;
2401	    }
2402
2403	  return hash;
2404	}
2405      break;
2406
2407    default:
2408      break;
2409    }
2410
2411  i = GET_RTX_LENGTH (code) - 1;
2412  hash += (unsigned) code + (unsigned) GET_MODE (x);
2413  fmt = GET_RTX_FORMAT (code);
2414  for (; i >= 0; i--)
2415    {
2416      if (fmt[i] == 'e')
2417	{
2418	  rtx tem = XEXP (x, i);
2419
2420	  /* If we are about to do the last recursive call
2421	     needed at this level, change it into iteration.
2422	     This function is called enough to be worth it.  */
2423	  if (i == 0)
2424	    {
2425	      x = tem;
2426	      goto repeat;
2427	    }
2428	  hash += canon_hash (tem, 0);
2429	}
2430      else if (fmt[i] == 'E')
2431	for (j = 0; j < XVECLEN (x, i); j++)
2432	  hash += canon_hash (XVECEXP (x, i, j), 0);
2433      else if (fmt[i] == 's')
2434	hash += canon_hash_string (XSTR (x, i));
2435      else if (fmt[i] == 'i')
2436	{
2437	  unsigned tem = XINT (x, i);
2438	  hash += tem;
2439	}
2440      else if (fmt[i] == '0' || fmt[i] == 't')
2441	/* Unused.  */
2442	;
2443      else
2444	abort ();
2445    }
2446  return hash;
2447}
2448
2449/* Like canon_hash but with no side effects.  */
2450
2451static unsigned
2452safe_hash (rtx x, enum machine_mode mode)
2453{
2454  int save_do_not_record = do_not_record;
2455  int save_hash_arg_in_memory = hash_arg_in_memory;
2456  unsigned hash = canon_hash (x, mode);
2457  hash_arg_in_memory = save_hash_arg_in_memory;
2458  do_not_record = save_do_not_record;
2459  return hash;
2460}
2461
2462/* Return 1 iff X and Y would canonicalize into the same thing,
2463   without actually constructing the canonicalization of either one.
2464   If VALIDATE is nonzero,
2465   we assume X is an expression being processed from the rtl
2466   and Y was found in the hash table.  We check register refs
2467   in Y for being marked as valid.
2468
2469   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2470   that is known to be in the register.  Ordinarily, we don't allow them
2471   to match, because letting them match would cause unpredictable results
2472   in all the places that search a hash table chain for an equivalent
2473   for a given value.  A possible equivalent that has different structure
2474   has its hash code computed from different data.  Whether the hash code
2475   is the same as that of the given value is pure luck.  */
2476
2477static int
2478exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
2479{
2480  int i, j;
2481  enum rtx_code code;
2482  const char *fmt;
2483
2484  /* Note: it is incorrect to assume an expression is equivalent to itself
2485     if VALIDATE is nonzero.  */
2486  if (x == y && !validate)
2487    return 1;
2488  if (x == 0 || y == 0)
2489    return x == y;
2490
2491  code = GET_CODE (x);
2492  if (code != GET_CODE (y))
2493    {
2494      if (!equal_values)
2495	return 0;
2496
2497      /* If X is a constant and Y is a register or vice versa, they may be
2498	 equivalent.  We only have to validate if Y is a register.  */
2499      if (CONSTANT_P (x) && GET_CODE (y) == REG
2500	  && REGNO_QTY_VALID_P (REGNO (y)))
2501	{
2502	  int y_q = REG_QTY (REGNO (y));
2503	  struct qty_table_elem *y_ent = &qty_table[y_q];
2504
2505	  if (GET_MODE (y) == y_ent->mode
2506	      && rtx_equal_p (x, y_ent->const_rtx)
2507	      && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2508	    return 1;
2509	}
2510
2511      if (CONSTANT_P (y) && code == REG
2512	  && REGNO_QTY_VALID_P (REGNO (x)))
2513	{
2514	  int x_q = REG_QTY (REGNO (x));
2515	  struct qty_table_elem *x_ent = &qty_table[x_q];
2516
2517	  if (GET_MODE (x) == x_ent->mode
2518	      && rtx_equal_p (y, x_ent->const_rtx))
2519	    return 1;
2520	}
2521
2522      return 0;
2523    }
2524
2525  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2526  if (GET_MODE (x) != GET_MODE (y))
2527    return 0;
2528
2529  switch (code)
2530    {
2531    case PC:
2532    case CC0:
2533    case CONST_INT:
2534      return x == y;
2535
2536    case LABEL_REF:
2537      return XEXP (x, 0) == XEXP (y, 0);
2538
2539    case SYMBOL_REF:
2540      return XSTR (x, 0) == XSTR (y, 0);
2541
2542    case REG:
2543      {
2544	unsigned int regno = REGNO (y);
2545	unsigned int endregno
2546	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2547		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2548	unsigned int i;
2549
2550	/* If the quantities are not the same, the expressions are not
2551	   equivalent.  If they are and we are not to validate, they
2552	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2553
2554	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2555	  return 0;
2556
2557	if (! validate)
2558	  return 1;
2559
2560	for (i = regno; i < endregno; i++)
2561	  if (REG_IN_TABLE (i) != REG_TICK (i))
2562	    return 0;
2563
2564	return 1;
2565      }
2566
2567    /*  For commutative operations, check both orders.  */
2568    case PLUS:
2569    case MULT:
2570    case AND:
2571    case IOR:
2572    case XOR:
2573    case NE:
2574    case EQ:
2575      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2576	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2577			       validate, equal_values))
2578	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2579			       validate, equal_values)
2580		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2581				  validate, equal_values)));
2582
2583    case ASM_OPERANDS:
2584      /* We don't use the generic code below because we want to
2585	 disregard filename and line numbers.  */
2586
2587      /* A volatile asm isn't equivalent to any other.  */
2588      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2589	return 0;
2590
2591      if (GET_MODE (x) != GET_MODE (y)
2592	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2593	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2594		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2595	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2596	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2597	return 0;
2598
2599      if (ASM_OPERANDS_INPUT_LENGTH (x))
2600	{
2601	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2602	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2603			       ASM_OPERANDS_INPUT (y, i),
2604			       validate, equal_values)
2605		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2606			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2607	      return 0;
2608	}
2609
2610      return 1;
2611
2612    default:
2613      break;
2614    }
2615
2616  /* Compare the elements.  If any pair of corresponding elements
2617     fails to match, return 0 for the whole thing.  */
2618
2619  fmt = GET_RTX_FORMAT (code);
2620  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2621    {
2622      switch (fmt[i])
2623	{
2624	case 'e':
2625	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2626	    return 0;
2627	  break;
2628
2629	case 'E':
2630	  if (XVECLEN (x, i) != XVECLEN (y, i))
2631	    return 0;
2632	  for (j = 0; j < XVECLEN (x, i); j++)
2633	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2634			       validate, equal_values))
2635	      return 0;
2636	  break;
2637
2638	case 's':
2639	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2640	    return 0;
2641	  break;
2642
2643	case 'i':
2644	  if (XINT (x, i) != XINT (y, i))
2645	    return 0;
2646	  break;
2647
2648	case 'w':
2649	  if (XWINT (x, i) != XWINT (y, i))
2650	    return 0;
2651	  break;
2652
2653	case '0':
2654	case 't':
2655	  break;
2656
2657	default:
2658	  abort ();
2659	}
2660    }
2661
2662  return 1;
2663}
2664
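/* For instance, with EQUAL_VALUES nonzero, comparing (const_int 0)
   against (reg 65) succeeds when the quantity of (reg 65) is recorded
   as holding (const_int 0) in that register's mode; with EQUAL_VALUES
   zero the same comparison fails immediately because the rtx codes
   differ.  (Register number invented for illustration.)  */
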
2665/* Return 1 if X has a value that can vary even between two
2666   executions of the program.  0 means X can be compared reliably
2667   against certain constants or near-constants.  */
2668
2669static int
2670cse_rtx_varies_p (rtx x, int from_alias)
2671{
2672  /* We need not check for X and the equivalence class being of the same
2673     mode because if X is equivalent to a constant in some mode, it
2674     doesn't vary in any mode.  */
2675
2676  if (GET_CODE (x) == REG
2677      && REGNO_QTY_VALID_P (REGNO (x)))
2678    {
2679      int x_q = REG_QTY (REGNO (x));
2680      struct qty_table_elem *x_ent = &qty_table[x_q];
2681
2682      if (GET_MODE (x) == x_ent->mode
2683	  && x_ent->const_rtx != NULL_RTX)
2684	return 0;
2685    }
2686
2687  if (GET_CODE (x) == PLUS
2688      && GET_CODE (XEXP (x, 1)) == CONST_INT
2689      && GET_CODE (XEXP (x, 0)) == REG
2690      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2691    {
2692      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2693      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2694
2695      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2696	  && x0_ent->const_rtx != NULL_RTX)
2697	return 0;
2698    }
2699
2700  /* This can happen as the result of virtual register instantiation, if
2701     the initial constant is too large to be a valid address.  This gives
2702     us a three instruction sequence, load large offset into a register,
2703     load fp minus a constant into a register, then a MEM which is the
2704     sum of the two `constant' registers.  */
2705  if (GET_CODE (x) == PLUS
2706      && GET_CODE (XEXP (x, 0)) == REG
2707      && GET_CODE (XEXP (x, 1)) == REG
2708      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2709      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2710    {
2711      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2712      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2713      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2714      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2715
2716      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2717	  && x0_ent->const_rtx != NULL_RTX
2718	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2719	  && x1_ent->const_rtx != NULL_RTX)
2720	return 0;
2721    }
2722
2723  return rtx_varies_p (x, from_alias);
2724}
2725
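/* E.g. (plus:SI (reg 65) (const_int 8)) is reported as non-varying by
   the second test above whenever the quantity of (reg 65) is known
   equivalent to a constant, even though rtx_varies_p alone would call
   it varying.  (Register number invented for illustration.)  */
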
2726/* Canonicalize an expression:
2727   replace each register reference inside it
2728   with the "oldest" equivalent register.
2729
2730   If INSN is nonzero and we are replacing a pseudo with a hard register
2731   or vice versa, validate_change is used to ensure that INSN remains valid
2732   after we make our substitution.  The calls are made with IN_GROUP nonzero
2733   so apply_change_group must be called upon the outermost return from this
2734   function (unless INSN is zero).  The result of apply_change_group can
2735   generally be discarded since the changes we are making are optional.  */
2736
2737static rtx
2738canon_reg (rtx x, rtx insn)
2739{
2740  int i;
2741  enum rtx_code code;
2742  const char *fmt;
2743
2744  if (x == 0)
2745    return x;
2746
2747  code = GET_CODE (x);
2748  switch (code)
2749    {
2750    case PC:
2751    case CC0:
2752    case CONST:
2753    case CONST_INT:
2754    case CONST_DOUBLE:
2755    case CONST_VECTOR:
2756    case SYMBOL_REF:
2757    case LABEL_REF:
2758    case ADDR_VEC:
2759    case ADDR_DIFF_VEC:
2760      return x;
2761
2762    case REG:
2763      {
2764	int first;
2765	int q;
2766	struct qty_table_elem *ent;
2767
2768	/* Never replace a hard reg, because hard regs can appear
2769	   in more than one machine mode, and we must preserve the mode
2770	   of each occurrence.  Also, some hard regs appear in
2771	   MEMs that are shared and mustn't be altered.  Don't try to
2772	   replace any reg that maps to a reg of class NO_REGS.  */
2773	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2774	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2775	  return x;
2776
2777	q = REG_QTY (REGNO (x));
2778	ent = &qty_table[q];
2779	first = ent->first_reg;
2780	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2781		: REGNO_REG_CLASS (first) == NO_REGS ? x
2782		: gen_rtx_REG (ent->mode, first));
2783      }
2784
2785    default:
2786      break;
2787    }
2788
2789  fmt = GET_RTX_FORMAT (code);
2790  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2791    {
2792      int j;
2793
2794      if (fmt[i] == 'e')
2795	{
2796	  rtx new = canon_reg (XEXP (x, i), insn);
2797	  int insn_code;
2798
2799	  /* If replacing pseudo with hard reg or vice versa, ensure the
2800	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2801	  if (insn != 0 && new != 0
2802	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2803	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2804		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2805		  || (insn_code = recog_memoized (insn)) < 0
2806		  || insn_data[insn_code].n_dups > 0))
2807	    validate_change (insn, &XEXP (x, i), new, 1);
2808	  else
2809	    XEXP (x, i) = new;
2810	}
2811      else if (fmt[i] == 'E')
2812	for (j = 0; j < XVECLEN (x, i); j++)
2813	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2814    }
2815
2816  return x;
2817}
2818
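/* A minimal sketch of the calling convention documented above
   canon_reg: substitutions are queued with IN_GROUP nonzero, so the
   outermost caller must flush the group; the result may be ignored
   because the changes are optional.  */
#if 0
  canon_reg (PATTERN (insn), insn);
  apply_change_group ();
#endif
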
2819/* LOC is a location within INSN that is an operand address (the contents of
2820   a MEM).  Find the best equivalent address to use that is valid for this
2821   insn.
2822
2823   On most CISC machines, complicated address modes are costly, and rtx_cost
2824   is a good approximation for that cost.  However, most RISC machines have
2825   only a few (usually only one) memory reference formats.  If an address is
2826   valid at all, it is often just as cheap as any other address.  Hence, for
2827   RISC machines, we use `address_cost' to compare the costs of various
2828   addresses.  For two addresses of equal cost, choose the one with the
2829   highest `rtx_cost' value as that has the potential of eliminating the
2830   most insns.  For equal costs, we choose the first in the equivalence
2831   class.  Note that we ignore the fact that pseudo registers are cheaper than
2832   hard registers here because we would also prefer the pseudo registers.  */
2833
2834static void
2835find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2836{
2837  struct table_elt *elt;
2838  rtx addr = *loc;
2839  struct table_elt *p;
2840  int found_better = 1;
2841  int save_do_not_record = do_not_record;
2842  int save_hash_arg_in_memory = hash_arg_in_memory;
2843  int addr_volatile;
2844  int regno;
2845  unsigned hash;
2846
2847  /* Do not try to replace constant addresses or addresses of local and
2848     argument slots.  These MEM expressions are made only once and inserted
2849     in many instructions, as well as being used to control symbol table
2850     output.  It is not safe to clobber them.
2851
2852     There are some uncommon cases where the address is already in a register
2853     for some reason, but we cannot take advantage of that because we have
2854     no easy way to unshare the MEM.  In addition, looking up all stack
2855     addresses is costly.  */
2856  if ((GET_CODE (addr) == PLUS
2857       && GET_CODE (XEXP (addr, 0)) == REG
2858       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2859       && (regno = REGNO (XEXP (addr, 0)),
2860	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2861	   || regno == ARG_POINTER_REGNUM))
2862      || (GET_CODE (addr) == REG
2863	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2864	      || regno == HARD_FRAME_POINTER_REGNUM
2865	      || regno == ARG_POINTER_REGNUM))
2866      || GET_CODE (addr) == ADDRESSOF
2867      || CONSTANT_ADDRESS_P (addr))
2868    return;
2869
2870  /* If this address is not simply a register, try to fold it.  This will
2871     sometimes simplify the expression.  Many simplifications
2872     will not be valid, but some, usually applying the associative rule, will
2873     be valid and produce better code.  */
2874  if (GET_CODE (addr) != REG)
2875    {
2876      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2877      int addr_folded_cost = address_cost (folded, mode);
2878      int addr_cost = address_cost (addr, mode);
2879
2880      if ((addr_folded_cost < addr_cost
2881	   || (addr_folded_cost == addr_cost
2882	       /* ??? The rtx_cost comparison is left over from an older
2883		  version of this code.  It is probably no longer helpful.  */
2884	       && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2885		   || approx_reg_cost (folded) < approx_reg_cost (addr))))
2886	  && validate_change (insn, loc, folded, 0))
2887	addr = folded;
2888    }
2889
2890  /* If this address is not in the hash table, we can't look for equivalences
2891     of the whole address.  Also, ignore if volatile.  */
2892
2893  do_not_record = 0;
2894  hash = HASH (addr, Pmode);
2895  addr_volatile = do_not_record;
2896  do_not_record = save_do_not_record;
2897  hash_arg_in_memory = save_hash_arg_in_memory;
2898
2899  if (addr_volatile)
2900    return;
2901
2902  elt = lookup (addr, hash, Pmode);
2903
2904  if (elt)
2905    {
2906      /* We need to find the best (under the criteria documented above) entry
2907	 in the class that is valid.  We use the `flag' field to indicate
2908	 choices that were invalid and iterate until we can't find a better
2909	 one that hasn't already been tried.  */
2910
2911      for (p = elt->first_same_value; p; p = p->next_same_value)
2912	p->flag = 0;
2913
2914      while (found_better)
2915	{
2916	  int best_addr_cost = address_cost (*loc, mode);
2917	  int best_rtx_cost = (elt->cost + 1) >> 1;
2918	  int exp_cost;
2919	  struct table_elt *best_elt = elt;
2920
2921	  found_better = 0;
2922	  for (p = elt->first_same_value; p; p = p->next_same_value)
2923	    if (! p->flag)
2924	      {
2925		if ((GET_CODE (p->exp) == REG
2926		     || exp_equiv_p (p->exp, p->exp, 1, 0))
2927		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2928			|| (exp_cost == best_addr_cost
2929			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
2930		  {
2931		    found_better = 1;
2932		    best_addr_cost = exp_cost;
2933		    best_rtx_cost = (p->cost + 1) >> 1;
2934		    best_elt = p;
2935		  }
2936	      }
2937
2938	  if (found_better)
2939	    {
2940	      if (validate_change (insn, loc,
2941				   canon_reg (copy_rtx (best_elt->exp),
2942					      NULL_RTX), 0))
2943		return;
2944	      else
2945		best_elt->flag = 1;
2946	    }
2947	}
2948    }
2949
2950  /* If the address is a binary operation with the first operand a register
2951     and the second a constant, do the same as above, but looking for
2952     equivalences of the register.  Then try to simplify before checking for
2953     the best address to use.  This catches a few cases:  First is when we
2954     have REG+const and the register is another REG+const.  We can often merge
2955     the constants and eliminate one insn and one register.  It may also be
2956     that a machine has a cheap REG+REG+const.  Finally, this improves the
2957     code on the Alpha for unaligned byte stores.  */
2958
2959  if (flag_expensive_optimizations
2960      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2961	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2962      && GET_CODE (XEXP (*loc, 0)) == REG)
2963    {
2964      rtx op1 = XEXP (*loc, 1);
2965
2966      do_not_record = 0;
2967      hash = HASH (XEXP (*loc, 0), Pmode);
2968      do_not_record = save_do_not_record;
2969      hash_arg_in_memory = save_hash_arg_in_memory;
2970
2971      elt = lookup (XEXP (*loc, 0), hash, Pmode);
2972      if (elt == 0)
2973	return;
2974
2975      /* We need to find the best (under the criteria documented above) entry
2976	 in the class that is valid.  We use the `flag' field to indicate
2977	 choices that were invalid and iterate until we can't find a better
2978	 one that hasn't already been tried.  */
2979
2980      for (p = elt->first_same_value; p; p = p->next_same_value)
2981	p->flag = 0;
2982
2983      while (found_better)
2984	{
2985	  int best_addr_cost = address_cost (*loc, mode);
2986	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
2987	  struct table_elt *best_elt = elt;
2988	  rtx best_rtx = *loc;
2989	  int count;
2990
2991	  /* This is at worst case an O(n^2) algorithm, so limit our search
2992	     to the first 32 elements on the list.  This avoids trouble
2993	     compiling code with very long basic blocks that can easily
2994	     call simplify_gen_binary so many times that we run out of
2995	     memory.  */
2996
2997	  found_better = 0;
2998	  for (p = elt->first_same_value, count = 0;
2999	       p && count < 32;
3000	       p = p->next_same_value, count++)
3001	    if (! p->flag
3002		&& (GET_CODE (p->exp) == REG
3003		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3004	      {
3005		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3006					       p->exp, op1);
3007		int new_cost;
3008		new_cost = address_cost (new, mode);
3009
3010		if (new_cost < best_addr_cost
3011		    || (new_cost == best_addr_cost
3012			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3013		  {
3014		    found_better = 1;
3015		    best_addr_cost = new_cost;
3016		    best_rtx_cost = (COST (new) + 1) >> 1;
3017		    best_elt = p;
3018		    best_rtx = new;
3019		  }
3020	      }
3021
3022	  if (found_better)
3023	    {
3024	      if (validate_change (insn, loc,
3025				   canon_reg (copy_rtx (best_rtx),
3026					      NULL_RTX), 0))
3027		return;
3028	      else
3029		best_elt->flag = 1;
3030	    }
3031	}
3032    }
3033}
3034
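/* A worked example of the REG+const case above, with invented register
   numbers: if *LOC is (plus (reg 66) (const_int 4)) and the class of
   (reg 66) contains (plus (reg 65) (const_int 8)), then

	simplify_gen_binary (PLUS, Pmode, p->exp, op1)

   yields (plus (reg 65) (const_int 12)), merging the two constants and
   potentially freeing (reg 66) and the insn that computed it.  */
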
3035/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3036   operation (EQ, NE, GT, etc.), follow it back through the hash table to find
3037   what values are being compared.
3038
3039   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3040   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3041   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3042   compared to produce cc0.
3043
3044   The return value is the comparison operator and is either CODE itself
3045   or the code corresponding to the inverse of the comparison.  */
3046
3047static enum rtx_code
3048find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3049		      enum machine_mode *pmode1, enum machine_mode *pmode2)
3050{
3051  rtx arg1, arg2;
3052
3053  arg1 = *parg1, arg2 = *parg2;
3054
3055  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3056
3057  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3058    {
3059      /* Set nonzero when we find something of interest.  */
3060      rtx x = 0;
3061      int reverse_code = 0;
3062      struct table_elt *p = 0;
3063
3064      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3065	 On machines with CC0, this is the only case that can occur, since
3066	 fold_rtx will return the COMPARE or item being compared with zero
3067	 when given CC0.  */
3068
3069      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3070	x = arg1;
3071
3072      /* If ARG1 is a comparison operator and CODE is testing for
3073	 STORE_FLAG_VALUE, get the inner arguments.  */
3074
3075      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3076	{
3077#ifdef FLOAT_STORE_FLAG_VALUE
3078	  REAL_VALUE_TYPE fsfv;
3079#endif
3080
3081	  if (code == NE
3082	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3083		  && code == LT && STORE_FLAG_VALUE == -1)
3084#ifdef FLOAT_STORE_FLAG_VALUE
3085	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3086		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3087		      REAL_VALUE_NEGATIVE (fsfv)))
3088#endif
3089	      )
3090	    x = arg1;
3091	  else if (code == EQ
3092		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3093		       && code == GE && STORE_FLAG_VALUE == -1)
3094#ifdef FLOAT_STORE_FLAG_VALUE
3095		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3096		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3097			   REAL_VALUE_NEGATIVE (fsfv)))
3098#endif
3099		   )
3100	    x = arg1, reverse_code = 1;
3101	}
3102
3103      /* ??? We could also check for
3104
3105	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3106
3107	 and related forms, but let's wait until we see them occurring.  */
3108
3109      if (x == 0)
3110	/* Look up ARG1 in the hash table and see if it has an equivalence
3111	   that lets us see what is being compared.  */
3112	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3113		    GET_MODE (arg1));
3114      if (p)
3115	{
3116	  p = p->first_same_value;
3117
3118	  /* If what we compare is already known to be constant, that is as
3119	     good as it gets.
3120	     We need to break the loop in this case, because otherwise we
3121	     can have an infinite loop when looking at a reg that is known
3122	     to be a constant which is the same as a comparison of a reg
3123	     against zero which appears later in the insn stream, which in
3124	     turn is constant and the same as the comparison of the first reg
3125	     against zero...  */
3126	  if (p->is_const)
3127	    break;
3128	}
3129
3130      for (; p; p = p->next_same_value)
3131	{
3132	  enum machine_mode inner_mode = GET_MODE (p->exp);
3133#ifdef FLOAT_STORE_FLAG_VALUE
3134	  REAL_VALUE_TYPE fsfv;
3135#endif
3136
3137	  /* If the entry isn't valid, skip it.  */
3138	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3139	    continue;
3140
3141	  if (GET_CODE (p->exp) == COMPARE
3142	      /* Another possibility is that this machine has a compare insn
3143		 that includes the comparison code.  In that case, ARG1 would
3144		 be equivalent to a comparison operation that would set ARG1 to
3145		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3146		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3147		 we must reverse ORIG_CODE.  On machines with a negative value
3148		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3149	      || ((code == NE
3150		   || (code == LT
3151		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3152		       && (GET_MODE_BITSIZE (inner_mode)
3153			   <= HOST_BITS_PER_WIDE_INT)
3154		       && (STORE_FLAG_VALUE
3155			   & ((HOST_WIDE_INT) 1
3156			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3157#ifdef FLOAT_STORE_FLAG_VALUE
3158		   || (code == LT
3159		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3160		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3161			   REAL_VALUE_NEGATIVE (fsfv)))
3162#endif
3163		   )
3164		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3165	    {
3166	      x = p->exp;
3167	      break;
3168	    }
3169	  else if ((code == EQ
3170		    || (code == GE
3171			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3172			&& (GET_MODE_BITSIZE (inner_mode)
3173			    <= HOST_BITS_PER_WIDE_INT)
3174			&& (STORE_FLAG_VALUE
3175			    & ((HOST_WIDE_INT) 1
3176			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3177#ifdef FLOAT_STORE_FLAG_VALUE
3178		    || (code == GE
3179			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3180			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3181			    REAL_VALUE_NEGATIVE (fsfv)))
3182#endif
3183		    )
3184		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3185	    {
3186	      reverse_code = 1;
3187	      x = p->exp;
3188	      break;
3189	    }
3190
3191	  /* If this is a non-trapping address, e.g. fp + constant, the
3192	     equivalent is a better operand since it may let us predict
3193	     the value of the comparison.  */
3194	  else if (!rtx_addr_can_trap_p (p->exp))
3195	    {
3196	      arg1 = p->exp;
3197	      continue;
3198	    }
3199	}
3200
3201      /* If we didn't find a useful equivalence for ARG1, we are done.
3202	 Otherwise, set up for the next iteration.  */
3203      if (x == 0)
3204	break;
3205
3206      /* If we need to reverse the comparison, make sure that this is
3207	 possible -- we can't necessarily infer the value of GE from LT
3208	 with floating-point operands.  */
3209      if (reverse_code)
3210	{
3211	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3212	  if (reversed == UNKNOWN)
3213	    break;
3214	  else
3215	    code = reversed;
3216	}
3217      else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3218	code = GET_CODE (x);
3219      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3220    }
3221
3222  /* Return our results.  Return the modes from before fold_rtx
3223     because fold_rtx might produce const_int, and then it's too late.  */
3224  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3225  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3226
3227  return code;
3228}
3229
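/* Example with invented operands: called with CODE == NE,
   *PARG1 == (compare (reg 65) (const_int 10)) and
   *PARG2 == (const_int 0), the loop above unwraps the COMPARE and the
   function returns NE with *PARG1 set to (reg 65) and *PARG2 set to
   (const_int 10).  */
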
3230/* If X is a nontrivial arithmetic operation on an argument
3231   for which a constant value can be determined, return
3232   the result of operating on that value, as a constant.
3233   Otherwise, return X, possibly with one or more operands
3234   modified by recursive calls to this function.
3235
3236   If X is a register whose contents are known, we do NOT
3237   return those contents here.  equiv_constant is called to
3238   perform that task.
3239
3240   INSN is the insn that we may be modifying.  If it is 0, make a copy
3241   of X before modifying it.  */
3242
3243static rtx
3244fold_rtx (rtx x, rtx insn)
3245{
3246  enum rtx_code code;
3247  enum machine_mode mode;
3248  const char *fmt;
3249  int i;
3250  rtx new = 0;
3251  int copied = 0;
3252  int must_swap = 0;
3253
3254  /* Folded equivalents of first two operands of X.  */
3255  rtx folded_arg0;
3256  rtx folded_arg1;
3257
3258  /* Constant equivalents of first three operands of X;
3259     0 when no such equivalent is known.  */
3260  rtx const_arg0;
3261  rtx const_arg1;
3262  rtx const_arg2;
3263
3264  /* The mode of the first operand of X.  We need this for sign and zero
3265     extends.  */
3266  enum machine_mode mode_arg0;
3267
3268  if (x == 0)
3269    return x;
3270
3271  mode = GET_MODE (x);
3272  code = GET_CODE (x);
3273  switch (code)
3274    {
3275    case CONST:
3276    case CONST_INT:
3277    case CONST_DOUBLE:
3278    case CONST_VECTOR:
3279    case SYMBOL_REF:
3280    case LABEL_REF:
3281    case REG:
3282      /* No use simplifying an EXPR_LIST
3283	 since they are used only for lists of args
3284	 in a function call's REG_EQUAL note.  */
3285    case EXPR_LIST:
3286      /* Changing anything inside an ADDRESSOF is incorrect; we don't
3287	 want to (e.g.,) make (addressof (const_int 0)) just because
3288	 the location is known to be zero.  */
3289    case ADDRESSOF:
3290      return x;
3291
3292#ifdef HAVE_cc0
3293    case CC0:
3294      return prev_insn_cc0;
3295#endif
3296
3297    case PC:
3298      /* If the next insn is a CODE_LABEL followed by a jump table,
3299	 PC's value is a LABEL_REF pointing to that label.  That
3300	 lets us fold switch statements on the VAX.  */
3301      {
3302	rtx next;
3303	if (insn && tablejump_p (insn, &next, NULL))
3304	  return gen_rtx_LABEL_REF (Pmode, next);
3305      }
3306      break;
3307
3308    case SUBREG:
3309      /* See if we previously assigned a constant value to this SUBREG.  */
3310      if ((new = lookup_as_function (x, CONST_INT)) != 0
3311	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3312	return new;
3313
3314      /* If this is a paradoxical SUBREG, we have no idea what value the
3315	 extra bits would have.  However, if the operand is equivalent
3316	 to a SUBREG whose operand is the same as our mode, and all the
3317	 modes are within a word, we can just use the inner operand
3318	 because these SUBREGs just say how to treat the register.
3319
3320	 Similarly if we find an integer constant.  */
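
      /* An illustrative case (register numbers invented): for the
	 paradoxical (subreg:SI (reg:QI 5) 0), if (reg:QI 5) is known
	 equivalent to (subreg:QI (reg:SI 6) 0) we can return (reg:SI 6)
	 directly, and if it is known equivalent to (const_int 3) we can
	 return that constant, since both SUBREGs only say how to treat
	 the register.  */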
3321
3322      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3323	{
3324	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3325	  struct table_elt *elt;
3326
3327	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3328	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3329	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3330				imode)) != 0)
3331	    for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3332	      {
3333		if (CONSTANT_P (elt->exp)
3334		    && GET_MODE (elt->exp) == VOIDmode)
3335		  return elt->exp;
3336
3337		if (GET_CODE (elt->exp) == SUBREG
3338		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
3339		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3340		  return copy_rtx (SUBREG_REG (elt->exp));
3341	      }
3342
3343	  return x;
3344	}
3345
3346      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
3347	 We may be able to do so if the SUBREG is extracting a single word in an
3348	 integral mode or extracting the low part.  */
3349
3350      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3351      const_arg0 = equiv_constant (folded_arg0);
3352      if (const_arg0)
3353	folded_arg0 = const_arg0;
3354
3355      if (folded_arg0 != SUBREG_REG (x))
3356	{
3357	  new = simplify_subreg (mode, folded_arg0,
3358				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3359	  if (new)
3360	    return new;
3361	}
3362
3363      /* If this is a narrowing SUBREG and our operand is a REG, see if
3364	 we can find an equivalence for REG that is an arithmetic operation
3365	 in a wider mode where both operands are paradoxical SUBREGs
3366	 from objects of our result mode.  In that case, we couldn't report
3367	 an equivalent value for that operation, since we don't know what the
3368	 extra bits will be.  But we can find an equivalence for this SUBREG
3369	 by folding that operation in the narrow mode.  This allows us to
3370	 fold arithmetic in narrow modes when the machine only supports
3371	 word-sized arithmetic.
3372
3373	 Also look for a case where we have a SUBREG whose operand is the
3374	 same as our result.  If both modes are smaller than a word, we
3375	 are simply interpreting a register in different modes and we
3376	 can use the inner value.  */
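
      /* For instance (a sketch with invented registers): if (reg:DI 7)
	 is known equivalent to
	 (plus:DI (subreg:DI (reg:SI 5) 0) (subreg:DI (reg:SI 6) 0))
	 and both (reg:SI 5) and (reg:SI 6) have known constant values,
	 then (subreg:SI (reg:DI 7) 0) folds to the SImode sum of those
	 constants, even though the addition was written in DImode.  */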
3377
3378      if (GET_CODE (folded_arg0) == REG
3379	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3380	  && subreg_lowpart_p (x))
3381	{
3382	  struct table_elt *elt;
3383
3384	  /* We can use HASH here since we know that canon_hash won't be
3385	     called.  */
3386	  elt = lookup (folded_arg0,
3387			HASH (folded_arg0, GET_MODE (folded_arg0)),
3388			GET_MODE (folded_arg0));
3389
3390	  if (elt)
3391	    elt = elt->first_same_value;
3392
3393	  for (; elt; elt = elt->next_same_value)
3394	    {
3395	      enum rtx_code eltcode = GET_CODE (elt->exp);
3396
3397	      /* Just check for unary and binary operations.  */
3398	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3399		  && GET_CODE (elt->exp) != SIGN_EXTEND
3400		  && GET_CODE (elt->exp) != ZERO_EXTEND
3401		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3402		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3403		  && (GET_MODE_CLASS (mode)
3404		      == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3405		{
3406		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3407
3408		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3409		    op0 = fold_rtx (op0, NULL_RTX);
3410
3411		  op0 = equiv_constant (op0);
3412		  if (op0)
3413		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3414						    op0, mode);
3415		}
3416	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3417			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3418		       && eltcode != DIV && eltcode != MOD
3419		       && eltcode != UDIV && eltcode != UMOD
3420		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3421		       && eltcode != ROTATE && eltcode != ROTATERT
3422		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3423			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3424				== mode))
3425			   || CONSTANT_P (XEXP (elt->exp, 0)))
3426		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3427			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3428				== mode))
3429			   || CONSTANT_P (XEXP (elt->exp, 1))))
3430		{
3431		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3432		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3433
3434		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3435		    op0 = fold_rtx (op0, NULL_RTX);
3436
3437		  if (op0)
3438		    op0 = equiv_constant (op0);
3439
3440		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3441		    op1 = fold_rtx (op1, NULL_RTX);
3442
3443		  if (op1)
3444		    op1 = equiv_constant (op1);
3445
3446		  /* If we are looking for the low SImode part of
3447		     (ashift:DI c (const_int 32)), it doesn't work
3448		     to compute that in SImode, because a 32-bit shift
3449		     in SImode is unpredictable.  We know the value is 0.  */
3450		  if (op0 && op1
3451		      && GET_CODE (elt->exp) == ASHIFT
3452		      && GET_CODE (op1) == CONST_INT
3453		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3454		    {
3455		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3456
3457			/* If the count fits in the inner mode's width,
3458			   but exceeds the outer mode's width,
3459			   the value will get truncated to 0
3460			   by the subreg.  */
3461			new = const0_rtx;
3462		      else
3463			/* If the count exceeds even the inner mode's width,
3464			   don't fold this expression.  */
3465			new = 0;
3466		    }
3467		  else if (op0 && op1)
3468		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3469						     op0, op1);
3470		}
3471
3472	      else if (GET_CODE (elt->exp) == SUBREG
3473		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
3474		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3475			   <= UNITS_PER_WORD)
3476		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3477		new = copy_rtx (SUBREG_REG (elt->exp));
3478
3479	      if (new)
3480		return new;
3481	    }
3482	}
3483
3484      return x;
3485
3486    case NOT:
3487    case NEG:
3488      /* If we have (NOT Y), see if Y is known to be (NOT Z).
3489	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3490      new = lookup_as_function (XEXP (x, 0), code);
3491      if (new)
3492	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3493      break;
3494
3495    case MEM:
3496      /* If we are not actually processing an insn, don't try to find the
3497	 best address.  Not only don't we care, but we could modify the
3498	 MEM in an invalid way since we have no insn to validate against.  */
3499      if (insn != 0)
3500	find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3501
3502      {
3503	/* Even if we don't fold in the insn itself,
3504	   we can safely do so here, in hopes of getting a constant.  */
3505	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3506	rtx base = 0;
3507	HOST_WIDE_INT offset = 0;
3508
3509	if (GET_CODE (addr) == REG
3510	    && REGNO_QTY_VALID_P (REGNO (addr)))
3511	  {
3512	    int addr_q = REG_QTY (REGNO (addr));
3513	    struct qty_table_elem *addr_ent = &qty_table[addr_q];
3514
3515	    if (GET_MODE (addr) == addr_ent->mode
3516		&& addr_ent->const_rtx != NULL_RTX)
3517	      addr = addr_ent->const_rtx;
3518	  }
3519
3520	/* If address is constant, split it into a base and integer offset.  */
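	/* For example, (const (plus (symbol_ref "x") (const_int 8)))
	   splits into base == (symbol_ref "x") and offset == 8 ("x" is
	   just an illustration).  */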
3521	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3522	  base = addr;
3523	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3524		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3525	  {
3526	    base = XEXP (XEXP (addr, 0), 0);
3527	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3528	  }
3529	else if (GET_CODE (addr) == LO_SUM
3530		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3531	  base = XEXP (addr, 1);
3532	else if (GET_CODE (addr) == ADDRESSOF)
3533	  return change_address (x, VOIDmode, addr);
3534
3535	/* If this is a constant pool reference, we can fold it into its
3536	   constant to allow better value tracking.  */
3537	if (base && GET_CODE (base) == SYMBOL_REF
3538	    && CONSTANT_POOL_ADDRESS_P (base))
3539	  {
3540	    rtx constant = get_pool_constant (base);
3541	    enum machine_mode const_mode = get_pool_mode (base);
3542	    rtx new;
3543
3544	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3545	      {
3546		constant_pool_entries_cost = COST (constant);
3547		constant_pool_entries_regcost = approx_reg_cost (constant);
3548	      }
3549
3550	    /* If we are loading the full constant, we have an equivalence.  */
3551	    if (offset == 0 && mode == const_mode)
3552	      return constant;
3553
3554	    /* If this actually isn't a constant (weird!), we can't do
3555	       anything.  Otherwise, handle the two most common cases:
3556	       extracting a word from a multi-word constant, and extracting
3557	       the low-order bits.  Other cases don't seem common enough to
3558	       worry about.  */
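	    /* As an illustration (assuming 4-byte words): an SImode
	       load at byte offset 4 into the pool entry of a DImode
	       constant is handled by operand_subword below, and a load
	       of the low-order part is handled by
	       gen_lowpart_if_possible.  */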
3559	    if (! CONSTANT_P (constant))
3560	      return x;
3561
3562	    if (GET_MODE_CLASS (mode) == MODE_INT
3563		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
3564		&& offset % UNITS_PER_WORD == 0
3565		&& (new = operand_subword (constant,
3566					   offset / UNITS_PER_WORD,
3567					   0, const_mode)) != 0)
3568	      return new;
3569
3570	    if (((BYTES_BIG_ENDIAN
3571		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3572		 || (! BYTES_BIG_ENDIAN && offset == 0))
3573		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
3574	      return new;
3575	  }
3576
3577	/* If this is a reference to a label at a known position in a jump
3578	   table, we also know its value.  */
3579	if (base && GET_CODE (base) == LABEL_REF)
3580	  {
3581	    rtx label = XEXP (base, 0);
3582	    rtx table_insn = NEXT_INSN (label);
3583
3584	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3585		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3586	      {
3587		rtx table = PATTERN (table_insn);
3588
3589		if (offset >= 0
3590		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3591			< XVECLEN (table, 0)))
3592		  return XVECEXP (table, 0,
3593				  offset / GET_MODE_SIZE (GET_MODE (table)));
3594	      }
3595	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3596		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3597	      {
3598		rtx table = PATTERN (table_insn);
3599
3600		if (offset >= 0
3601		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3602			< XVECLEN (table, 1)))
3603		  {
3604		    offset /= GET_MODE_SIZE (GET_MODE (table));
3605		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3606					 XEXP (table, 0));
3607
3608		    if (GET_MODE (table) != Pmode)
3609		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3610
3611		    /* Indicate this is a constant.  This isn't a
3612		       valid form of CONST, but it will only be used
3613		       to fold the next insns and then discarded, so
3614		       it should be safe.
3615
3616		       Note this expression must be explicitly discarded,
3617		       by cse_insn, else it may end up in a REG_EQUAL note
3618		       and "escape" to cause problems elsewhere.  */
3619		    return gen_rtx_CONST (GET_MODE (new), new);
3620		  }
3621	      }
3622	  }
3623
3624	return x;
3625      }
3626
3627#ifdef NO_FUNCTION_CSE
3628    case CALL:
3629      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3630	return x;
3631      break;
3632#endif
3633
3634    case ASM_OPERANDS:
3635      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3636	validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3637			 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3638      break;
3639
3640    default:
3641      break;
3642    }
3643
3644  const_arg0 = 0;
3645  const_arg1 = 0;
3646  const_arg2 = 0;
3647  mode_arg0 = VOIDmode;
3648
3649  /* Try folding our operands.
3650     Then see which ones have constant values known.  */
3651
3652  fmt = GET_RTX_FORMAT (code);
3653  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3654    if (fmt[i] == 'e')
3655      {
3656	rtx arg = XEXP (x, i);
3657	rtx folded_arg = arg, const_arg = 0;
3658	enum machine_mode mode_arg = GET_MODE (arg);
3659	rtx cheap_arg, expensive_arg;
3660	rtx replacements[2];
3661	int j;
3662	int old_cost = COST_IN (XEXP (x, i), code);
3663
3664	/* Most arguments are cheap, so handle them specially.  */
3665	switch (GET_CODE (arg))
3666	  {
3667	  case REG:
3668	    /* This is the same as calling equiv_constant; it is duplicated
3669	       here for speed.  */
3670	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3671	      {
3672		int arg_q = REG_QTY (REGNO (arg));
3673		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3674
3675		if (arg_ent->const_rtx != NULL_RTX
3676		    && GET_CODE (arg_ent->const_rtx) != REG
3677		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3678		  const_arg
3679		    = gen_lowpart_if_possible (GET_MODE (arg),
3680					       arg_ent->const_rtx);
3681	      }
3682	    break;
3683
3684	  case CONST:
3685	  case CONST_INT:
3686	  case SYMBOL_REF:
3687	  case LABEL_REF:
3688	  case CONST_DOUBLE:
3689	  case CONST_VECTOR:
3690	    const_arg = arg;
3691	    break;
3692
3693#ifdef HAVE_cc0
3694	  case CC0:
3695	    folded_arg = prev_insn_cc0;
3696	    mode_arg = prev_insn_cc0_mode;
3697	    const_arg = equiv_constant (folded_arg);
3698	    break;
3699#endif
3700
3701	  default:
3702	    folded_arg = fold_rtx (arg, insn);
3703	    const_arg = equiv_constant (folded_arg);
3704	  }
3705
3706	/* For the first three operands, see if the operand
3707	   is constant or equivalent to a constant.  */
3708	switch (i)
3709	  {
3710	  case 0:
3711	    folded_arg0 = folded_arg;
3712	    const_arg0 = const_arg;
3713	    mode_arg0 = mode_arg;
3714	    break;
3715	  case 1:
3716	    folded_arg1 = folded_arg;
3717	    const_arg1 = const_arg;
3718	    break;
3719	  case 2:
3720	    const_arg2 = const_arg;
3721	    break;
3722	  }
3723
3724	/* Pick the least expensive of the folded argument and an
3725	   equivalent constant argument.  */
3726	if (const_arg == 0 || const_arg == folded_arg
3727	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3728	  cheap_arg = folded_arg, expensive_arg = const_arg;
3729	else
3730	  cheap_arg = const_arg, expensive_arg = folded_arg;
3731
3732	/* Try to replace the operand with the cheapest of the two
3733	   possibilities.  If it doesn't work and this is either of the first
3734	   two operands of a commutative operation, try swapping them.
3735	   If THAT fails, try the more expensive, provided it is cheaper
3736	   than what is already there.  */
3737
3738	if (cheap_arg == XEXP (x, i))
3739	  continue;
3740
3741	if (insn == 0 && ! copied)
3742	  {
3743	    x = copy_rtx (x);
3744	    copied = 1;
3745	  }
3746
3747	/* Order the replacements from cheapest to most expensive.  */
3748	replacements[0] = cheap_arg;
3749	replacements[1] = expensive_arg;
3750
3751	for (j = 0; j < 2 && replacements[j]; j++)
3752	  {
3753	    int new_cost = COST_IN (replacements[j], code);
3754
3755	    /* Stop if what existed before was cheaper.  Prefer constants
3756	       in the case of a tie.  */
3757	    if (new_cost > old_cost
3758		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3759	      break;
3760
3761	    /* It's not safe to substitute the operand of a conversion
3762	       operator with a constant, as the conversion's identity
3763	       depends upon the mode of its operand.  This optimization
3764	       is handled by the call to simplify_unary_operation.  */
3765	    if (GET_RTX_CLASS (code) == '1'
3766		&& GET_MODE (replacements[j]) != mode_arg0
3767		&& (code == ZERO_EXTEND
3768		    || code == SIGN_EXTEND
3769		    || code == TRUNCATE
3770		    || code == FLOAT_TRUNCATE
3771		    || code == FLOAT_EXTEND
3772		    || code == FLOAT
3773		    || code == FIX
3774		    || code == UNSIGNED_FLOAT
3775		    || code == UNSIGNED_FIX))
3776	      continue;
3777
3778	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3779	      break;
3780
3781	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3782		|| code == LTGT || code == UNEQ || code == ORDERED
3783		|| code == UNORDERED)
3784	      {
3785		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3786		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3787
3788		if (apply_change_group ())
3789		  {
3790		    /* The swap worked; swap the operands back to the invalid
3791		       order so this loop can continue, and flag them to be swapped again later.  */
3792		    rtx tem;
3793
3794		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3795				       XEXP (x, 1) = tem;
3796		    must_swap = 1;
3797		    break;
3798		  }
3799	      }
3800	  }
3801      }
3802
3803    else
3804      {
3805	if (fmt[i] == 'E')
3806	  /* Don't try to fold inside of a vector of expressions.
3807	     Doing nothing is harmless.  */
3808	  {;}
3809      }
3810
3811  /* If a commutative operation, place a constant integer as the second
3812     operand unless the first operand is also a constant integer.  Otherwise,
3813     place any constant second unless the first operand is also a constant.  */
3814
3815  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3816      || code == LTGT || code == UNEQ || code == ORDERED
3817      || code == UNORDERED)
3818    {
3819      if (must_swap
3820	  || swap_commutative_operands_p (const_arg0 ? const_arg0
3821						     : XEXP (x, 0),
3822					  const_arg1 ? const_arg1
3823						     : XEXP (x, 1)))
3824	{
3825	  rtx tem = XEXP (x, 0);
3826
3827	  if (insn == 0 && ! copied)
3828	    {
3829	      x = copy_rtx (x);
3830	      copied = 1;
3831	    }
3832
3833	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3834	  validate_change (insn, &XEXP (x, 1), tem, 1);
3835	  if (apply_change_group ())
3836	    {
3837	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3838	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3839	    }
3840	}
3841    }
3842
3843  /* If X is an arithmetic operation, see if we can simplify it.  */
3844
3845  switch (GET_RTX_CLASS (code))
3846    {
3847    case '1':
3848      {
3849	int is_const = 0;
3850
3851	/* We can't simplify extension ops unless we know the
3852	   original mode.  */
3853	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3854	    && mode_arg0 == VOIDmode)
3855	  break;
3856
3857	/* If we had a CONST, strip it off and put it back later if we
3858	   fold.  */
3859	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3860	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3861
3862	new = simplify_unary_operation (code, mode,
3863					const_arg0 ? const_arg0 : folded_arg0,
3864					mode_arg0);
3865	if (new != 0 && is_const)
3866	  new = gen_rtx_CONST (mode, new);
3867      }
3868      break;
3869
3870    case '<':
3871      /* See what items are actually being compared and set FOLDED_ARG[01]
3872	 to those values and CODE to the actual comparison code.  If any are
3873	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3874	 do anything if both operands are already known to be constant.  */
3875
3876      if (const_arg0 == 0 || const_arg1 == 0)
3877	{
3878	  struct table_elt *p0, *p1;
3879	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3880	  enum machine_mode mode_arg1;
3881
3882#ifdef FLOAT_STORE_FLAG_VALUE
3883	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3884	    {
3885	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3886			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3887	      false_rtx = CONST0_RTX (mode);
3888	    }
3889#endif
3890
3891	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3892				       &mode_arg0, &mode_arg1);
3893	  const_arg0 = equiv_constant (folded_arg0);
3894	  const_arg1 = equiv_constant (folded_arg1);
3895
3896	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3897	     what kinds of things are being compared, so we can't do
3898	     anything with this comparison.  */
3899
3900	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3901	    break;
3902
3903	  /* If we do not now have two constants being compared, see
3904	     if we can nevertheless deduce some things about the
3905	     comparison.  */
3906	  if (const_arg0 == 0 || const_arg1 == 0)
3907	    {
3908	      /* Some addresses are known to be nonzero.  We don't know
3909		 their sign, but equality comparisons are known.  */
3910	      if (const_arg1 == const0_rtx
3911		  && nonzero_address_p (folded_arg0))
3912		{
3913		  if (code == EQ)
3914		    return false_rtx;
3915		  else if (code == NE)
3916		    return true_rtx;
3917		}
3918
3919	      /* See if the two operands are the same.  */
3920
3921	      if (folded_arg0 == folded_arg1
3922		  || (GET_CODE (folded_arg0) == REG
3923		      && GET_CODE (folded_arg1) == REG
3924		      && (REG_QTY (REGNO (folded_arg0))
3925			  == REG_QTY (REGNO (folded_arg1))))
3926		  || ((p0 = lookup (folded_arg0,
3927				    (safe_hash (folded_arg0, mode_arg0)
3928				     & HASH_MASK), mode_arg0))
3929		      && (p1 = lookup (folded_arg1,
3930				       (safe_hash (folded_arg1, mode_arg0)
3931					& HASH_MASK), mode_arg0))
3932		      && p0->first_same_value == p1->first_same_value))
3933		{
3934		  /* Sadly two equal NaNs are not equivalent.  */
3935		  if (!HONOR_NANS (mode_arg0))
3936		    return ((code == EQ || code == LE || code == GE
3937			     || code == LEU || code == GEU || code == UNEQ
3938			     || code == UNLE || code == UNGE
3939			     || code == ORDERED)
3940			    ? true_rtx : false_rtx);
3941		  /* Take care of the FP comparisons we can still resolve.  */
3942		  if (code == UNEQ || code == UNLE || code == UNGE)
3943		    return true_rtx;
3944		  if (code == LTGT || code == LT || code == GT)
3945		    return false_rtx;
3946		}
3947
3948	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3949		 doing now is either the same as we did before or the reverse
3950		 (we only check the reverse if not floating-point).  */
3951	      else if (GET_CODE (folded_arg0) == REG)
3952		{
3953		  int qty = REG_QTY (REGNO (folded_arg0));
3954
3955		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3956		    {
3957		      struct qty_table_elem *ent = &qty_table[qty];
3958
3959		      if ((comparison_dominates_p (ent->comparison_code, code)
3960			   || (! FLOAT_MODE_P (mode_arg0)
3961			       && comparison_dominates_p (ent->comparison_code,
3962						          reverse_condition (code))))
3963			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3964			      || (const_arg1
3965				  && rtx_equal_p (ent->comparison_const,
3966						  const_arg1))
3967			      || (GET_CODE (folded_arg1) == REG
3968				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3969			return (comparison_dominates_p (ent->comparison_code, code)
3970				? true_rtx : false_rtx);
3971		    }
3972		}
3973	    }
3974	}
3975
3976      /* If we are comparing against zero, see if the first operand is
3977	 equivalent to an IOR with a constant.  If so, we may be able to
3978	 determine the result of this comparison.  */
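
      /* An illustrative case: if folded_arg0 is known to be
	 (ior:SI (reg:SI 6) (const_int 4)), its value must be nonzero,
	 so EQ against zero folds to false and NE to true; and if the
	 IOR'ed constant has the sign bit set, the value is negative,
	 so LT and LE fold to true, GT and GE to false.  */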
3979
3980      if (const_arg1 == const0_rtx)
3981	{
3982	  rtx y = lookup_as_function (folded_arg0, IOR);
3983	  rtx inner_const;
3984
3985	  if (y != 0
3986	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3987	      && GET_CODE (inner_const) == CONST_INT
3988	      && INTVAL (inner_const) != 0)
3989	    {
3990	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3991	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3992			      && (INTVAL (inner_const)
3993				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3994	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3995
3996#ifdef FLOAT_STORE_FLAG_VALUE
3997	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3998		{
3999		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4000			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4001		  false_rtx = CONST0_RTX (mode);
4002		}
4003#endif
4004
4005	      switch (code)
4006		{
4007		case EQ:
4008		  return false_rtx;
4009		case NE:
4010		  return true_rtx;
4011		case LT:  case LE:
4012		  if (has_sign)
4013		    return true_rtx;
4014		  break;
4015		case GT:  case GE:
4016		  if (has_sign)
4017		    return false_rtx;
4018		  break;
4019		default:
4020		  break;
4021		}
4022	    }
4023	}
4024
4025      new = simplify_relational_operation (code,
4026					   (mode_arg0 != VOIDmode
4027					    ? mode_arg0
4028					    : (GET_MODE (const_arg0
4029							 ? const_arg0
4030							 : folded_arg0)
4031					       != VOIDmode)
4032					    ? GET_MODE (const_arg0
4033							? const_arg0
4034							: folded_arg0)
4035					    : GET_MODE (const_arg1
4036							? const_arg1
4037							: folded_arg1)),
4038					   const_arg0 ? const_arg0 : folded_arg0,
4039					   const_arg1 ? const_arg1 : folded_arg1);
4040#ifdef FLOAT_STORE_FLAG_VALUE
4041      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4042	{
4043	  if (new == const0_rtx)
4044	    new = CONST0_RTX (mode);
4045	  else
4046	    new = (CONST_DOUBLE_FROM_REAL_VALUE
4047		   (FLOAT_STORE_FLAG_VALUE (mode), mode));
4048	}
4049#endif
4050      break;
4051
4052    case '2':
4053    case 'c':
4054      switch (code)
4055	{
4056	case PLUS:
4057	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4058	     with that LABEL_REF as its second operand.  If so, the result is
4059	     the first operand of that MINUS.  This handles switches with an
4060	     ADDR_DIFF_VEC table.  */
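	  /* Concretely (labels invented for illustration): if const_arg1
	     is (label_ref L2) and folded_arg0 is known to be
	     (minus (label_ref L1) (label_ref L2)), the sum is simply
	     (label_ref L1).  */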
4061	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4062	    {
4063	      rtx y
4064		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4065		: lookup_as_function (folded_arg0, MINUS);
4066
4067	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4068		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4069		return XEXP (y, 0);
4070
4071	      /* Now try for a CONST of a MINUS like the above.  */
4072	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4073			: lookup_as_function (folded_arg0, CONST))) != 0
4074		  && GET_CODE (XEXP (y, 0)) == MINUS
4075		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4076		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4077		return XEXP (XEXP (y, 0), 0);
4078	    }
4079
4080	  /* Likewise if the operands are in the other order.  */
4081	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4082	    {
4083	      rtx y
4084		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4085		: lookup_as_function (folded_arg1, MINUS);
4086
4087	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4088		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4089		return XEXP (y, 0);
4090
4091	      /* Now try for a CONST of a MINUS like the above.  */
4092	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4093			: lookup_as_function (folded_arg1, CONST))) != 0
4094		  && GET_CODE (XEXP (y, 0)) == MINUS
4095		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4096		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4097		return XEXP (XEXP (y, 0), 0);
4098	    }
4099
4100	  /* If second operand is a register equivalent to a negative
4101	     CONST_INT, see if we can find a register equivalent to the
4102	     positive constant.  Make a MINUS if so.  Don't do this for
4103	     a non-negative constant since we might then alternate between
4104	     choosing positive and negative constants.  Having the positive
4105	     constant previously-used is the more common case.  Be sure
4106	     the resulting constant is non-negative; if const_arg1 were
4107	     the smallest negative number this would overflow: depending
4108	     on the mode, this would either just be the same value (and
4109	     hence not save anything) or be incorrect.  */
4110	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4111	      && INTVAL (const_arg1) < 0
4112	      /* This used to test
4113
4114	         -INTVAL (const_arg1) >= 0
4115
4116		 But the Sun V5.0 compilers miscompiled that test.  So
4117		 instead we test for the problematic value in a more direct
4118		 manner and hope the Sun compilers get it correct.  */
4119	      && INTVAL (const_arg1) !=
4120	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4121	      && GET_CODE (folded_arg1) == REG)
4122	    {
4123	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4124	      struct table_elt *p
4125		= lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4126			  mode);
4127
4128	      if (p)
4129		for (p = p->first_same_value; p; p = p->next_same_value)
4130		  if (GET_CODE (p->exp) == REG)
4131		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4132						canon_reg (p->exp, NULL_RTX));
4133	    }
4134	  goto from_plus;
4135
4136	case MINUS:
4137	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4138	     If so, produce (PLUS Z C2-C).  */
4139	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4140	    {
4141	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4142	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4143		return fold_rtx (plus_constant (copy_rtx (y),
4144						-INTVAL (const_arg1)),
4145				 NULL_RTX);
4146	    }
4147
4148	  /* Fall through.  */
4149
4150	from_plus:
4151	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4152	case IOR:     case AND:       case XOR:
4153	case MULT:
4154	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4155	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4156	     is known to be of similar form, we may be able to replace the
4157	     operation with a combined operation.  This may eliminate the
4158	     intermediate operation if every use is simplified in this way.
4159	     Note that the similar optimization done by combine.c only works
4160	     if the intermediate operation's result has only one reference.  */
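
	  /* A sketch with invented registers: if (reg:SI 7) is known to
	     be (plus:SI (reg:SI 6) (const_int 4)), then
	     (plus:SI (reg:SI 7) (const_int 8)) can be replaced by
	     (plus:SI (reg:SI 6) (const_int 12)), skipping the
	     intermediate register entirely.  */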
4161
4162	  if (GET_CODE (folded_arg0) == REG
4163	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4164	    {
4165	      int is_shift
4166		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4167	      rtx y = lookup_as_function (folded_arg0, code);
4168	      rtx inner_const;
4169	      enum rtx_code associate_code;
4170	      rtx new_const;
4171
4172	      if (y == 0
4173		  || 0 == (inner_const
4174			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4175		  || GET_CODE (inner_const) != CONST_INT
4176		  /* If we have compiled a statement like
4177		     "if (x == (x & mask1))", and now are looking at
4178		     "x & mask2", we will have a case where the first operand
4179		     of Y is the same as our first operand.  Unless we detect
4180		     this case, an infinite loop will result.  */
4181		  || XEXP (y, 0) == folded_arg0)
4182		break;
4183
4184	      /* Don't associate these operations if they are a PLUS with the
4185		 same constant and it is a power of two.  These might be doable
4186		 with a pre- or post-increment.  Similarly for two subtracts of
4187		 identical powers of two with post-decrement.  */
4188
4189	      if (code == PLUS && const_arg1 == inner_const
4190		  && ((HAVE_PRE_INCREMENT
4191			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4192		      || (HAVE_POST_INCREMENT
4193			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4194		      || (HAVE_PRE_DECREMENT
4195			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4196		      || (HAVE_POST_DECREMENT
4197			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4198		break;
4199
4200	      /* Compute the code used to compose the constants.  For example,
4201		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
4202
4203	      associate_code = (is_shift || code == MINUS ? PLUS : code);
4204
4205	      new_const = simplify_binary_operation (associate_code, mode,
4206						     const_arg1, inner_const);
4207
4208	      if (new_const == 0)
4209		break;
4210
4211	      /* If we are associating shift operations, don't let this
4212		 produce a shift of the size of the object or larger.
4213		 This could occur when we follow a sign-extend by a right
4214		 shift on a machine that does a sign-extend as a pair
4215		 of shifts.  */
4216
4217	      if (is_shift && GET_CODE (new_const) == CONST_INT
4218		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4219		{
4220		  /* As an exception, we can turn an ASHIFTRT of this
4221		     form into a shift of the number of bits - 1.  */
4222		  if (code == ASHIFTRT)
4223		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4224		  else
4225		    break;
4226		}
4227
4228	      y = copy_rtx (XEXP (y, 0));
4229
4230	      /* If Y contains our first operand (the most common way this
4231		 can happen is if Y is a MEM), we would go into an infinite
4232		 loop if we tried to fold it.  So don't in that case.  */
4233
4234	      if (! reg_mentioned_p (folded_arg0, y))
4235		y = fold_rtx (y, insn);
4236
4237	      return simplify_gen_binary (code, mode, y, new_const);
4238	    }
4239	  break;
4240
4241	case DIV:       case UDIV:
4242	  /* ??? The associative optimization performed immediately above is
4243	     also possible for DIV and UDIV using associate_code of MULT.
4244	     However, we would need extra code to verify that the
4245	     multiplication does not overflow, that is, there is no overflow
4246	     in the calculation of new_const.  */
4247	  break;
4248
4249	default:
4250	  break;
4251	}
4252
4253      new = simplify_binary_operation (code, mode,
4254				       const_arg0 ? const_arg0 : folded_arg0,
4255				       const_arg1 ? const_arg1 : folded_arg1);
4256      break;
4257
4258    case 'o':
4259      /* (lo_sum (high X) X) is simply X.  */
4260      if (code == LO_SUM && const_arg0 != 0
4261	  && GET_CODE (const_arg0) == HIGH
4262	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4263	return const_arg1;
4264      break;
4265
4266    case '3':
4267    case 'b':
4268      new = simplify_ternary_operation (code, mode, mode_arg0,
4269					const_arg0 ? const_arg0 : folded_arg0,
4270					const_arg1 ? const_arg1 : folded_arg1,
4271					const_arg2 ? const_arg2 : XEXP (x, 2));
4272      break;
4273
4274    case 'x':
4275      /* Eliminate CONSTANT_P_RTX if it is constant.  */
4276      if (code == CONSTANT_P_RTX)
4277	{
4278	  if (const_arg0)
4279	    return const1_rtx;
4280	  if (optimize == 0 || !flag_gcse)
4281	    return const0_rtx;
4282	}
4283      break;
4284    }
4285
4286  return new ? new : x;
4287}
4288
4289/* Return a constant value currently equivalent to X.
4290   Return 0 if we don't know one.  */
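
/* For example (register number invented): if the current extended
   basic block contains (set (reg:SI 5) (const_int 42)), then a later
   call of equiv_constant on (reg:SI 5) returns (const_int 42); for a
   register with no known constant it returns 0.  */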
4291
4292static rtx
4293equiv_constant (rtx x)
4294{
4295  if (GET_CODE (x) == REG
4296      && REGNO_QTY_VALID_P (REGNO (x)))
4297    {
4298      int x_q = REG_QTY (REGNO (x));
4299      struct qty_table_elem *x_ent = &qty_table[x_q];
4300
4301      if (x_ent->const_rtx)
4302	x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4303    }
4304
4305  if (x == 0 || CONSTANT_P (x))
4306    return x;
4307
4308  /* If X is a MEM, try to fold it outside the context of any insn to see if
4309     it might be equivalent to a constant.  That handles the case where it
4310     is a constant-pool reference.  Then try to look it up in the hash table
4311     in case it is something whose value we have seen before.  */
4312
4313  if (GET_CODE (x) == MEM)
4314    {
4315      struct table_elt *elt;
4316
4317      x = fold_rtx (x, NULL_RTX);
4318      if (CONSTANT_P (x))
4319	return x;
4320
4321      elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4322      if (elt == 0)
4323	return 0;
4324
4325      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4326	if (elt->is_const && CONSTANT_P (elt->exp))
4327	  return elt->exp;
4328    }
4329
4330  return 0;
4331}
4332
4333/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4334   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4335   least-significant part of X.
4336   MODE specifies how big a part of X to return.
4337
4338   If the requested operation cannot be done, 0 is returned.
4339
4340   This is similar to gen_lowpart in emit-rtl.c.  */
4341
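
/* A sketch (modes chosen for illustration): for a pseudo register,
   gen_lowpart_common typically yields a lowpart SUBREG, e.g.
   (subreg:QI (reg:SI 5) 0) on a little-endian target; for a MEM this
   function instead adjusts the address by the endian-dependent offset
   computed below, returning 0 if the adjusted address is not valid.  */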
4342rtx
4343gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4344{
4345  rtx result = gen_lowpart_common (mode, x);
4346
4347  if (result)
4348    return result;
4349  else if (GET_CODE (x) == MEM)
4350    {
4351      /* This is the only other case we handle.  */
4352      int offset = 0;
4353      rtx new;
4354
4355      if (WORDS_BIG_ENDIAN)
4356	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4357		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4358      if (BYTES_BIG_ENDIAN)
4359	/* Adjust the address so that the address-after-the-data is
4360	   unchanged.  */
4361	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4362		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4363
4364      new = adjust_address_nv (x, mode, offset);
4365      if (! memory_address_p (mode, XEXP (new, 0)))
4366	return 0;
4367
4368      return new;
4369    }
4370  else
4371    return 0;
4372}
4373
4374/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4375   branch.  It will be zero if not.
4376
4377   In certain cases, this can cause us to add an equivalence.  For example,
4378   if we are following the taken case of
4379	if (i == 2)
4380   we can add the fact that `i' and `2' are now equivalent.
4381
4382   In any case, we can record that this comparison was passed.  If the same
4383   comparison is seen later, we will know its value.  */
4384
4385static void
4386record_jump_equiv (rtx insn, int taken)
4387{
4388  int cond_known_true;
4389  rtx op0, op1;
4390  rtx set;
4391  enum machine_mode mode, mode0, mode1;
4392  int reversed_nonequality = 0;
4393  enum rtx_code code;
4394
4395  /* Ensure this is the right kind of insn.  */
4396  if (! any_condjump_p (insn))
4397    return;
4398  set = pc_set (insn);
4399
4400  /* See if this jump condition is known true or false.  */
4401  if (taken)
4402    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4403  else
4404    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4405
4406  /* Get the type of comparison being done and the operands being compared.
4407     If we had to reverse a non-equality condition, record that fact so we
4408     know that it isn't valid for floating-point.  */
4409  code = GET_CODE (XEXP (SET_SRC (set), 0));
4410  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4411  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4412
4413  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4414  if (! cond_known_true)
4415    {
4416      code = reversed_comparison_code_parts (code, op0, op1, insn);
4417
4418      /* Don't remember if we can't find the inverse.  */
4419      if (code == UNKNOWN)
4420	return;
4421    }
4422
4423  /* The mode is the mode of the non-constant.  */
4424  mode = mode0;
4425  if (mode1 != VOIDmode)
4426    mode = mode1;
4427
4428  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4429}
4430
4431/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4432   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4433   Make any useful entries we can with that information.  Called from
4434   above function and called recursively.  */
4435
4436static void
4437record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4438		  rtx op1, int reversed_nonequality)
4439{
4440  unsigned op0_hash, op1_hash;
4441  int op0_in_memory, op1_in_memory;
4442  struct table_elt *op0_elt, *op1_elt;
4443
4444  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4445     we know that they are also equal in the smaller mode (this is also
4446     true for all smaller modes whether or not there is a SUBREG, but
4447     it is not worth testing for when there is no SUBREG).  */
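
  /* An illustrative case: from the taken arm of
     (eq (subreg:DI (reg:SI 5) 0) (reg:DI 6)) we can also record that
     (reg:SI 5) equals the SImode low part of (reg:DI 6), since the
     paradoxical SUBREG's extra bits cannot affect the SImode value.  */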
4448
4449  /* Note that GET_MODE (op0) may not equal MODE.  */
4450  if (code == EQ && GET_CODE (op0) == SUBREG
4451      && (GET_MODE_SIZE (GET_MODE (op0))
4452	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4453    {
4454      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4455      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4456
4457      record_jump_cond (code, mode, SUBREG_REG (op0),
4458			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4459			reversed_nonequality);
4460    }
4461
4462  if (code == EQ && GET_CODE (op1) == SUBREG
4463      && (GET_MODE_SIZE (GET_MODE (op1))
4464	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4465    {
4466      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4467      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4468
4469      record_jump_cond (code, mode, SUBREG_REG (op1),
4470			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4471			reversed_nonequality);
4472    }
4473
4474  /* Similarly, if this is an NE comparison, and either is a SUBREG
4475     making a smaller mode, we know the whole thing is also NE.  */
4476
4477  /* Note that GET_MODE (op0) may not equal MODE;
4478     if we test MODE instead, we can get an infinite recursion
4479     alternating between two modes each wider than MODE.  */
4480
4481  if (code == NE && GET_CODE (op0) == SUBREG
4482      && subreg_lowpart_p (op0)
4483      && (GET_MODE_SIZE (GET_MODE (op0))
4484	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4485    {
4486      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4487      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4488
4489      record_jump_cond (code, mode, SUBREG_REG (op0),
4490			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4491			reversed_nonequality);
4492    }
4493
4494  if (code == NE && GET_CODE (op1) == SUBREG
4495      && subreg_lowpart_p (op1)
4496      && (GET_MODE_SIZE (GET_MODE (op1))
4497	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4498    {
4499      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4500      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4501
4502      record_jump_cond (code, mode, SUBREG_REG (op1),
4503			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4504			reversed_nonequality);
4505    }
4506
4507  /* Hash both operands.  */
4508
4509  do_not_record = 0;
4510  hash_arg_in_memory = 0;
4511  op0_hash = HASH (op0, mode);
4512  op0_in_memory = hash_arg_in_memory;
4513
4514  if (do_not_record)
4515    return;
4516
4517  do_not_record = 0;
4518  hash_arg_in_memory = 0;
4519  op1_hash = HASH (op1, mode);
4520  op1_in_memory = hash_arg_in_memory;
4521
4522  if (do_not_record)
4523    return;
4524
4525  /* Look up both operands.  */
4526  op0_elt = lookup (op0, op0_hash, mode);
4527  op1_elt = lookup (op1, op1_hash, mode);
4528
4529  /* If both operands are already equivalent or if they are not in the
4530     table but are identical, do nothing.  */
4531  if ((op0_elt != 0 && op1_elt != 0
4532       && op0_elt->first_same_value == op1_elt->first_same_value)
4533      || op0 == op1 || rtx_equal_p (op0, op1))
4534    return;
4535
4536  /* If we aren't setting two things equal, all we can do is save this
4537     comparison.  Similarly if this is floating-point.  In the latter
4538     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4539     If we record the equality, we might inadvertently delete code
4540     whose intent was to change -0 to +0.  */
4541
4542  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4543    {
4544      struct qty_table_elem *ent;
4545      int qty;
4546
4547      /* If we reversed a floating-point comparison, if OP0 is not a
4548	 register, or if OP1 is neither a register nor a constant, we can't
4549	 do anything.  */
4550
4551      if (GET_CODE (op1) != REG)
4552	op1 = equiv_constant (op1);
4553
4554      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4555	  || GET_CODE (op0) != REG || op1 == 0)
4556	return;
4557
4558      /* Put OP0 in the hash table if it isn't already.  This gives it a
4559	 new quantity number.  */
4560      if (op0_elt == 0)
4561	{
4562	  if (insert_regs (op0, NULL, 0))
4563	    {
4564	      rehash_using_reg (op0);
4565	      op0_hash = HASH (op0, mode);
4566
4567	      /* If OP0 is contained in OP1, this changes its hash code
4568		 as well.  Faster to rehash than to check, except
4569		 for the simple case of a constant.  */
4570	      if (! CONSTANT_P (op1))
4571		op1_hash = HASH (op1, mode);
4572	    }
4573
4574	  op0_elt = insert (op0, NULL, op0_hash, mode);
4575	  op0_elt->in_memory = op0_in_memory;
4576	}
4577
4578      qty = REG_QTY (REGNO (op0));
4579      ent = &qty_table[qty];
4580
4581      ent->comparison_code = code;
4582      if (GET_CODE (op1) == REG)
4583	{
4584	  /* Look it up again--in case op0 and op1 are the same.  */
4585	  op1_elt = lookup (op1, op1_hash, mode);
4586
4587	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4588	  if (op1_elt == 0)
4589	    {
4590	      if (insert_regs (op1, NULL, 0))
4591		{
4592		  rehash_using_reg (op1);
4593		  op1_hash = HASH (op1, mode);
4594		}
4595
4596	      op1_elt = insert (op1, NULL, op1_hash, mode);
4597	      op1_elt->in_memory = op1_in_memory;
4598	    }
4599
4600	  ent->comparison_const = NULL_RTX;
4601	  ent->comparison_qty = REG_QTY (REGNO (op1));
4602	}
4603      else
4604	{
4605	  ent->comparison_const = op1;
4606	  ent->comparison_qty = -1;
4607	}
4608
4609      return;
4610    }
4611
4612  /* If either side is still missing an equivalence, make it now,
4613     then merge the equivalences.  */
4614
4615  if (op0_elt == 0)
4616    {
4617      if (insert_regs (op0, NULL, 0))
4618	{
4619	  rehash_using_reg (op0);
4620	  op0_hash = HASH (op0, mode);
4621	}
4622
4623      op0_elt = insert (op0, NULL, op0_hash, mode);
4624      op0_elt->in_memory = op0_in_memory;
4625    }
4626
4627  if (op1_elt == 0)
4628    {
4629      if (insert_regs (op1, NULL, 0))
4630	{
4631	  rehash_using_reg (op1);
4632	  op1_hash = HASH (op1, mode);
4633	}
4634
4635      op1_elt = insert (op1, NULL, op1_hash, mode);
4636      op1_elt->in_memory = op1_in_memory;
4637    }
4638
4639  merge_equiv_classes (op0_elt, op1_elt);
4640  last_jump_equiv_class = op0_elt;
4641}
4642
4643/* CSE processing for one instruction.
4644   First simplify sources and addresses of all assignments
4645   in the instruction, using previously-computed equivalent values.
4646   Then install the new sources and destinations in the table
4647   of available values.
4648
4649   If LIBCALL_INSN is nonzero, don't record any equivalence made in
4650   the insn.  It means that INSN is inside a libcall block.  In this
4651   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4652
4653/* Data on one SET contained in the instruction.  */
4654
4655struct set
4656{
4657  /* The SET rtx itself.  */
4658  rtx rtl;
4659  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4660  rtx src;
4661  /* The hash-table element for the SET_SRC of the SET.  */
4662  struct table_elt *src_elt;
4663  /* Hash value for the SET_SRC.  */
4664  unsigned src_hash;
4665  /* Hash value for the SET_DEST.  */
4666  unsigned dest_hash;
4667  /* The SET_DEST, with SUBREG, etc., stripped.  */
4668  rtx inner_dest;
4669  /* Nonzero if the SET_SRC is in memory.  */
4670  char src_in_memory;
4671  /* Nonzero if the SET_SRC contains something
4672     whose value cannot be predicted and understood.  */
4673  char src_volatile;
4674  /* Original machine mode, in case it becomes a CONST_INT.
4675     The size of this field should match the size of the mode
4676     field of struct rtx_def (see rtl.h).  */
4677  ENUM_BITFIELD(machine_mode) mode : 8;
4678  /* A constant equivalent for SET_SRC, if any.  */
4679  rtx src_const;
4680  /* Original SET_SRC value used for libcall notes.  */
4681  rtx orig_src;
4682  /* Hash value of constant equivalent for SET_SRC.  */
4683  unsigned src_const_hash;
4684  /* Table entry for constant equivalent for SET_SRC, if any.  */
4685  struct table_elt *src_const_elt;
4686};
4687
4688static void
4689cse_insn (rtx insn, rtx libcall_insn)
4690{
4691  rtx x = PATTERN (insn);
4692  int i;
4693  rtx tem;
4694  int n_sets = 0;
4695
4696#ifdef HAVE_cc0
4697  /* Records what this insn does to set CC0.  */
4698  rtx this_insn_cc0 = 0;
4699  enum machine_mode this_insn_cc0_mode = VOIDmode;
4700#endif
4701
4702  rtx src_eqv = 0;
4703  struct table_elt *src_eqv_elt = 0;
4704  int src_eqv_volatile = 0;
4705  int src_eqv_in_memory = 0;
4706  unsigned src_eqv_hash = 0;
4707
4708  struct set *sets = (struct set *) 0;
4709
4710  this_insn = insn;
4711
4712  /* Find all the SETs and CLOBBERs in this instruction.
4713     Record all the SETs in the array `sets' and count them.
4714     Also determine whether there is a CLOBBER that invalidates
4715     all memory references, or all references at varying addresses.  */
4716
4717  if (GET_CODE (insn) == CALL_INSN)
4718    {
4719      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4720	{
4721	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4722	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4723	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4724	}
4725    }
4726
4727  if (GET_CODE (x) == SET)
4728    {
4729      sets = alloca (sizeof (struct set));
4730      sets[0].rtl = x;
4731
4732      /* Ignore SETs that are unconditional jumps.
4733	 They never need cse processing, so this does not hurt.
4734	 The reason is not efficiency but rather
4735	 so that we can test at the end for instructions
4736	 that have been simplified to unconditional jumps
4737	 and not be misled by unchanged instructions
4738	 that were unconditional jumps to begin with.  */
4739      if (SET_DEST (x) == pc_rtx
4740	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4741	;
4742
4743      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4744	 The hard function value register is used only once, to copy to
4745	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4746	 Ensure we invalidate the destination register.  On the 80386 no
4747	 other code would invalidate it since it is a fixed_reg.
4748	 We need not check the return of apply_change_group; see canon_reg.  */
4749
4750      else if (GET_CODE (SET_SRC (x)) == CALL)
4751	{
4752	  canon_reg (SET_SRC (x), insn);
4753	  apply_change_group ();
4754	  fold_rtx (SET_SRC (x), insn);
4755	  invalidate (SET_DEST (x), VOIDmode);
4756	}
4757      else
4758	n_sets = 1;
4759    }
4760  else if (GET_CODE (x) == PARALLEL)
4761    {
4762      int lim = XVECLEN (x, 0);
4763
4764      sets = alloca (lim * sizeof (struct set));
4765
4766      /* Find all regs explicitly clobbered in this insn,
4767	 and ensure they are not replaced with any other regs
4768	 elsewhere in this insn.
4769	 When a reg that is clobbered is also used for input,
4770	 we should presume that this is for a reason,
4771	 and we should not substitute some other register
4772	 which is not supposed to be clobbered.
4773	 Therefore, this loop cannot be merged into the one below
4774	 because a CALL may precede a CLOBBER and refer to the
4775	 value clobbered.  We must not let a canonicalization do
4776	 anything in that case.  */
4777      for (i = 0; i < lim; i++)
4778	{
4779	  rtx y = XVECEXP (x, 0, i);
4780	  if (GET_CODE (y) == CLOBBER)
4781	    {
4782	      rtx clobbered = XEXP (y, 0);
4783
4784	      if (GET_CODE (clobbered) == REG
4785		  || GET_CODE (clobbered) == SUBREG)
4786		invalidate (clobbered, VOIDmode);
4787	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4788		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4789		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4790	    }
4791	}
4792
4793      for (i = 0; i < lim; i++)
4794	{
4795	  rtx y = XVECEXP (x, 0, i);
4796	  if (GET_CODE (y) == SET)
4797	    {
4798	      /* As above, we ignore unconditional jumps and call-insns and
4799		 ignore the result of apply_change_group.  */
4800	      if (GET_CODE (SET_SRC (y)) == CALL)
4801		{
4802		  canon_reg (SET_SRC (y), insn);
4803		  apply_change_group ();
4804		  fold_rtx (SET_SRC (y), insn);
4805		  invalidate (SET_DEST (y), VOIDmode);
4806		}
4807	      else if (SET_DEST (y) == pc_rtx
4808		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4809		;
4810	      else
4811		sets[n_sets++].rtl = y;
4812	    }
4813	  else if (GET_CODE (y) == CLOBBER)
4814	    {
4815	      /* If we clobber memory, canon the address.
4816		 This does nothing when a register is clobbered
4817		 because we have already invalidated the reg.  */
4818	      if (GET_CODE (XEXP (y, 0)) == MEM)
4819		canon_reg (XEXP (y, 0), NULL_RTX);
4820	    }
4821	  else if (GET_CODE (y) == USE
4822		   && ! (GET_CODE (XEXP (y, 0)) == REG
4823			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4824	    canon_reg (y, NULL_RTX);
4825	  else if (GET_CODE (y) == CALL)
4826	    {
4827	      /* The result of apply_change_group can be ignored; see
4828		 canon_reg.  */
4829	      canon_reg (y, insn);
4830	      apply_change_group ();
4831	      fold_rtx (y, insn);
4832	    }
4833	}
4834    }
4835  else if (GET_CODE (x) == CLOBBER)
4836    {
4837      if (GET_CODE (XEXP (x, 0)) == MEM)
4838	canon_reg (XEXP (x, 0), NULL_RTX);
4839    }
4840
4841  /* Canonicalize a USE of a pseudo register or memory location.  */
4842  else if (GET_CODE (x) == USE
4843	   && ! (GET_CODE (XEXP (x, 0)) == REG
4844		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4845    canon_reg (XEXP (x, 0), NULL_RTX);
4846  else if (GET_CODE (x) == CALL)
4847    {
4848      /* The result of apply_change_group can be ignored; see canon_reg.  */
4849      canon_reg (x, insn);
4850      apply_change_group ();
4851      fold_rtx (x, insn);
4852    }
4853
4854  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4855     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
4856     is handled specially for this case, and if it isn't set, then there will
4857     be no equivalence for the destination.  */
4858  if (n_sets == 1 && REG_NOTES (insn) != 0
4859      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4860      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4861	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4862    {
4863      src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4864      XEXP (tem, 0) = src_eqv;
4865    }
4866
4867  /* Canonicalize sources and addresses of destinations.
4868     We do this in a separate pass to avoid problems when a MATCH_DUP is
4869     present in the insn pattern.  In that case, we want to ensure that
4870     we don't break the duplicate nature of the pattern.  So we will replace
4871     both operands at the same time.  Otherwise, we would fail to find an
4872     equivalent substitution in the loop calling validate_change below.
4873
4874     We used to suppress canonicalization of DEST if it appears in SRC,
4875     but we don't do this any more.  */
4876
4877  for (i = 0; i < n_sets; i++)
4878    {
4879      rtx dest = SET_DEST (sets[i].rtl);
4880      rtx src = SET_SRC (sets[i].rtl);
4881      rtx new = canon_reg (src, insn);
4882      int insn_code;
4883
4884      sets[i].orig_src = src;
4885      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4886	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4887	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4888	  || (insn_code = recog_memoized (insn)) < 0
4889	  || insn_data[insn_code].n_dups > 0)
4890	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4891      else
4892	SET_SRC (sets[i].rtl) = new;
4893
4894      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4895	{
4896	  validate_change (insn, &XEXP (dest, 1),
4897			   canon_reg (XEXP (dest, 1), insn), 1);
4898	  validate_change (insn, &XEXP (dest, 2),
4899			   canon_reg (XEXP (dest, 2), insn), 1);
4900	}
4901
4902      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4903	     || GET_CODE (dest) == ZERO_EXTRACT
4904	     || GET_CODE (dest) == SIGN_EXTRACT)
4905	dest = XEXP (dest, 0);
4906
4907      if (GET_CODE (dest) == MEM)
4908	canon_reg (dest, insn);
4909    }
4910
4911  /* Now that we have done all the replacements, we can apply the change
4912     group and see if they all work.  Note that this will cause some
4913     canonicalizations that would have worked individually not to be applied
4914     because some other canonicalization didn't work, but this should not
4915     occur often.
4916
4917     The result of apply_change_group can be ignored; see canon_reg.  */
4918
4919  apply_change_group ();
4920
4921  /* Set sets[i].src_elt to the class each source belongs to.
4922     Detect assignments from or to volatile things
4923     and set set[i] to zero so they will be ignored
4924     in the rest of this function.
4925
4926     Nothing in this loop changes the hash table or the register chains.  */
4927
4928  for (i = 0; i < n_sets; i++)
4929    {
4930      rtx src, dest;
4931      rtx src_folded;
4932      struct table_elt *elt = 0, *p;
4933      enum machine_mode mode;
4934      rtx src_eqv_here;
4935      rtx src_const = 0;
4936      rtx src_related = 0;
4937      struct table_elt *src_const_elt = 0;
4938      int src_cost = MAX_COST;
4939      int src_eqv_cost = MAX_COST;
4940      int src_folded_cost = MAX_COST;
4941      int src_related_cost = MAX_COST;
4942      int src_elt_cost = MAX_COST;
4943      int src_regcost = MAX_COST;
4944      int src_eqv_regcost = MAX_COST;
4945      int src_folded_regcost = MAX_COST;
4946      int src_related_regcost = MAX_COST;
4947      int src_elt_regcost = MAX_COST;
4948      /* Set nonzero if we need to call force_const_mem on the
4949	 contents of src_folded before using it.  */
4950      int src_folded_force_flag = 0;
4951
4952      dest = SET_DEST (sets[i].rtl);
4953      src = SET_SRC (sets[i].rtl);
4954
4955      /* If SRC is a constant that has no machine mode,
4956	 hash it with the destination's machine mode.
4957	 This way we can keep different modes separate.  */
4958
4959      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4960      sets[i].mode = mode;
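
      /* E.g. (illustrative) (const_int 5) has VOIDmode, so for
	 (set (reg:SI 100) (const_int 5)) the constant is hashed in
	 SImode; the same constant stored into an HImode destination
	 gets a separate table entry.  */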
4961
4962      if (src_eqv)
4963	{
4964	  enum machine_mode eqvmode = mode;
4965	  if (GET_CODE (dest) == STRICT_LOW_PART)
4966	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4967	  do_not_record = 0;
4968	  hash_arg_in_memory = 0;
4969	  src_eqv_hash = HASH (src_eqv, eqvmode);
4970
4971	  /* Find the equivalence class for the equivalent expression.  */
4972
4973	  if (!do_not_record)
4974	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4975
4976	  src_eqv_volatile = do_not_record;
4977	  src_eqv_in_memory = hash_arg_in_memory;
4978	}
4979
4980      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4981	 value of the INNER register, not the destination.  So it is not
4982	 a valid substitution for the source.  But save it for later.  */
4983      if (GET_CODE (dest) == STRICT_LOW_PART)
4984	src_eqv_here = 0;
4985      else
4986	src_eqv_here = src_eqv;
4987
4988      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
4989	 simplified result, which may not necessarily be valid.  */
4990      src_folded = fold_rtx (src, insn);
4991
4992#if 0
4993      /* ??? This caused bad code to be generated for the m68k port with -O2.
4994	 Suppose src is (CONST_INT -1), and that after truncation src_folded
4995	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
4996	 At the end we will add src and src_const to the same equivalence
4997	 class.  We now have 3 and -1 on the same equivalence class.  This
4998	 causes later instructions to be mis-optimized.  */
4999      /* If storing a constant in a bitfield, pre-truncate the constant
5000	 so we will be able to record it later.  */
5001      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5002	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5003	{
5004	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5005
5006	  if (GET_CODE (src) == CONST_INT
5007	      && GET_CODE (width) == CONST_INT
5008	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5009	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5010	    src_folded
5011	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5012					  << INTVAL (width)) - 1));
5013	}
5014#endif
5015
5016      /* Compute SRC's hash code, and also notice if it
5017	 should not be recorded at all.  In that case,
5018	 prevent any further processing of this assignment.  */
5019      do_not_record = 0;
5020      hash_arg_in_memory = 0;
5021
5022      sets[i].src = src;
5023      sets[i].src_hash = HASH (src, mode);
5024      sets[i].src_volatile = do_not_record;
5025      sets[i].src_in_memory = hash_arg_in_memory;
5026
5027      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5028	 a pseudo, do not record SRC.  Using SRC as a replacement for
5029	 anything else will be incorrect in that situation.  Note that
5030	 this usually occurs only for stack slots, in which case all the
5031	 RTL would be referring to SRC, so we don't lose any optimization
5032	 opportunities by not having SRC in the hash table.  */
5033
5034      if (GET_CODE (src) == MEM
5035	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5036	  && GET_CODE (dest) == REG
5037	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5038	sets[i].src_volatile = 1;
5039
5040#if 0
5041      /* It is no longer clear why we used to do this, but it doesn't
5042	 appear to still be needed.  So let's try without it since this
5043	 code hurts cse'ing widened ops.  */
5044      /* If source is a perverse subreg (such as QI treated as an SI),
5045	 treat it as volatile.  It may do the work of an SI in one context
5046	 where the extra bits are not being used, but cannot replace an SI
5047	 in general.  */
5048      if (GET_CODE (src) == SUBREG
5049	  && (GET_MODE_SIZE (GET_MODE (src))
5050	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5051	sets[i].src_volatile = 1;
5052#endif
5053
5054      /* Locate all possible equivalent forms for SRC.  Try to replace
5055         SRC in the insn with each cheaper equivalent.
5056
5057         We have the following types of equivalents: SRC itself, a folded
5058         version, a value given in a REG_EQUAL note, or a value related
5059	 to a constant.
5060
5061         Each of these equivalents may be part of an additional class
5062         of equivalents (if more than one is in the table, they must be in
5063         the same class; we check for this).
5064
5065	 If the source is volatile, we don't do any table lookups.
5066
5067         We note any constant equivalent for possible later use in a
5068         REG_NOTE.  */
5069
5070      if (!sets[i].src_volatile)
5071	elt = lookup (src, sets[i].src_hash, mode);
5072
5073      sets[i].src_elt = elt;
5074
5075      if (elt && src_eqv_here && src_eqv_elt)
5076	{
5077	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5078	    {
5079	      /* The REG_EQUAL is indicating that two formerly distinct
5080		 classes are now equivalent.  So merge them.  */
5081	      merge_equiv_classes (elt, src_eqv_elt);
5082	      src_eqv_hash = HASH (src_eqv, elt->mode);
5083	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5084	    }
5085
5086	  src_eqv_here = 0;
5087	}
5088
5089      else if (src_eqv_elt)
5090	elt = src_eqv_elt;
5091
5092      /* Try to find a constant somewhere and record it in `src_const'.
5093	 Record its table element, if any, in `src_const_elt'.  Look in
5094	 any known equivalences first.  (If the constant is not in the
5095	 table, also set `sets[i].src_const_hash').  */
5096      if (elt)
5097	for (p = elt->first_same_value; p; p = p->next_same_value)
5098	  if (p->is_const)
5099	    {
5100	      src_const = p->exp;
5101	      src_const_elt = elt;
5102	      break;
5103	    }
5104
5105      if (src_const == 0
5106	  && (CONSTANT_P (src_folded)
5107	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5108		 "constant" here so we will record it. This allows us
5109		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5110	      || (GET_CODE (src_folded) == MINUS
5111		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5112		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5113	src_const = src_folded, src_const_elt = elt;
5114      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5115	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5116
5117      /* If we don't know if the constant is in the table, get its
5118	 hash code and look it up.  */
5119      if (src_const && src_const_elt == 0)
5120	{
5121	  sets[i].src_const_hash = HASH (src_const, mode);
5122	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5123	}
5124
5125      sets[i].src_const = src_const;
5126      sets[i].src_const_elt = src_const_elt;
5127
5128      /* If the constant and our source are both in the table, mark them as
5129	 equivalent.  Otherwise, if a constant is in the table but the source
5130	 isn't, set ELT to it.  */
5131      if (src_const_elt && elt
5132	  && src_const_elt->first_same_value != elt->first_same_value)
5133	merge_equiv_classes (elt, src_const_elt);
5134      else if (src_const_elt && elt == 0)
5135	elt = src_const_elt;
5136
5137      /* See if there is a register linearly related to a constant
5138         equivalent of SRC.  */
5139      if (src_const
5140	  && (GET_CODE (src_const) == CONST
5141	      || (src_const_elt && src_const_elt->related_value != 0)))
5142	{
5143	  src_related = use_related_value (src_const, src_const_elt);
5144	  if (src_related)
5145	    {
5146	      struct table_elt *src_related_elt
5147		= lookup (src_related, HASH (src_related, mode), mode);
5148	      if (src_related_elt && elt)
5149		{
5150		  if (elt->first_same_value
5151		      != src_related_elt->first_same_value)
5152		    /* This can occur when we previously saw a CONST
5153		       involving a SYMBOL_REF and then see the SYMBOL_REF
5154		       twice.  Merge the involved classes.  */
5155		    merge_equiv_classes (elt, src_related_elt);
5156
5157		  src_related = 0;
5158		  src_related_elt = 0;
5159		}
5160	      else if (src_related_elt && elt == 0)
5161		elt = src_related_elt;
5162	    }
5163	}
5164
5165      /* See if we have a CONST_INT that is already in a register in a
5166	 wider mode.  */
5167
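      /* Illustrative case: if (reg:SI 100) is already known to hold
	 (const_int 5) and we now want that constant in QImode,
	 (subreg:QI (reg:SI 100) 0) may be cheaper than materializing
	 the constant again.  */
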
5168      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5169	  && GET_MODE_CLASS (mode) == MODE_INT
5170	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5171	{
5172	  enum machine_mode wider_mode;
5173
5174	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5175	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5176	       && src_related == 0;
5177	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5178	    {
5179	      struct table_elt *const_elt
5180		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5181
5182	      if (const_elt == 0)
5183		continue;
5184
5185	      for (const_elt = const_elt->first_same_value;
5186		   const_elt; const_elt = const_elt->next_same_value)
5187		if (GET_CODE (const_elt->exp) == REG)
5188		  {
5189		    src_related = gen_lowpart_if_possible (mode,
5190							   const_elt->exp);
5191		    break;
5192		  }
5193	    }
5194	}
5195
5196      /* Another possibility is that we have an AND with a constant in
5197	 a mode narrower than a word.  If so, it might have been generated
5198	 as part of an "if" which would narrow the AND.  If we already
5199	 have done the AND in a wider mode, we can use a SUBREG of that
5200	 value.  */
5201
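      /* For example (illustrative): if
	 (and:SI (reg:SI 100) (const_int 255)) was already computed into
	 (reg:SI 101), a QImode AND of the low part of (reg:SI 100) with
	 the same constant can use (subreg:QI (reg:SI 101) 0) instead.  */
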
5202      if (flag_expensive_optimizations && ! src_related
5203	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5204	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5205	{
5206	  enum machine_mode tmode;
5207	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5208
5209	  for (tmode = GET_MODE_WIDER_MODE (mode);
5210	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5211	       tmode = GET_MODE_WIDER_MODE (tmode))
5212	    {
5213	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5214	      struct table_elt *larger_elt;
5215
5216	      if (inner)
5217		{
5218		  PUT_MODE (new_and, tmode);
5219		  XEXP (new_and, 0) = inner;
5220		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5221		  if (larger_elt == 0)
5222		    continue;
5223
5224		  for (larger_elt = larger_elt->first_same_value;
5225		       larger_elt; larger_elt = larger_elt->next_same_value)
5226		    if (GET_CODE (larger_elt->exp) == REG)
5227		      {
5228			src_related
5229			  = gen_lowpart_if_possible (mode, larger_elt->exp);
5230			break;
5231		      }
5232
5233		  if (src_related)
5234		    break;
5235		}
5236	    }
5237	}
5238
5239#ifdef LOAD_EXTEND_OP
5240      /* See if a MEM has already been loaded with a widening operation;
5241	 if it has, we can use a subreg of that.  Many CISC machines
5242	 also have such operations, but this is only likely to be
5243	 beneficial on these machines.  */
5244
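      /* Illustrative case for a target whose QImode loads zero-extend
	 (LOAD_EXTEND_OP (QImode) == ZERO_EXTEND): if
	 (zero_extend:SI (mem:QI addr)) already lives in (reg:SI 100),
	 a QImode read of the same memory can instead use
	 (subreg:QI (reg:SI 100) 0).  */
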
5245      if (flag_expensive_optimizations && src_related == 0
5246	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5247	  && GET_MODE_CLASS (mode) == MODE_INT
5248	  && GET_CODE (src) == MEM && ! do_not_record
5249	  && LOAD_EXTEND_OP (mode) != NIL)
5250	{
5251	  enum machine_mode tmode;
5252
5253	  /* Set what we are trying to extend and the operation it might
5254	     have been extended with.  */
5255	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5256	  XEXP (memory_extend_rtx, 0) = src;
5257
5258	  for (tmode = GET_MODE_WIDER_MODE (mode);
5259	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5260	       tmode = GET_MODE_WIDER_MODE (tmode))
5261	    {
5262	      struct table_elt *larger_elt;
5263
5264	      PUT_MODE (memory_extend_rtx, tmode);
5265	      larger_elt = lookup (memory_extend_rtx,
5266				   HASH (memory_extend_rtx, tmode), tmode);
5267	      if (larger_elt == 0)
5268		continue;
5269
5270	      for (larger_elt = larger_elt->first_same_value;
5271		   larger_elt; larger_elt = larger_elt->next_same_value)
5272		if (GET_CODE (larger_elt->exp) == REG)
5273		  {
5274		    src_related = gen_lowpart_if_possible (mode,
5275							   larger_elt->exp);
5276		    break;
5277		  }
5278
5279	      if (src_related)
5280		break;
5281	    }
5282	}
5283#endif /* LOAD_EXTEND_OP */
5284
5285      if (src == src_folded)
5286	src_folded = 0;
5287
5288      /* At this point, ELT, if nonzero, points to a class of expressions
5289         equivalent to the source of this SET; and SRC, SRC_EQV, SRC_FOLDED,
5290	 and SRC_RELATED, if nonzero, each contain additional equivalent
5291	 expressions.  Prune these latter expressions by deleting expressions
5292	 already in the equivalence class.
5293
5294	 Check for an equivalent identical to the destination.  If found,
5295	 this is the preferred equivalent since it will likely lead to
5296	 elimination of the insn.  Indicate this by placing it in
5297	 `src_related'.  */
5298
5299      if (elt)
5300	elt = elt->first_same_value;
5301      for (p = elt; p; p = p->next_same_value)
5302	{
5303	  enum rtx_code code = GET_CODE (p->exp);
5304
5305	  /* If the expression is not valid, ignore it.  Then we do not
5306	     have to check for validity below.  In most cases, we can use
5307	     `rtx_equal_p', since canonicalization has already been done.  */
5308	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5309	    continue;
5310
5311	  /* Also skip paradoxical subregs, unless that's what we're
5312	     looking for.  */
5313	  if (code == SUBREG
5314	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5315		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5316	      && ! (src != 0
5317		    && GET_CODE (src) == SUBREG
5318		    && GET_MODE (src) == GET_MODE (p->exp)
5319		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5320			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5321	    continue;
5322
5323	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5324	    src = 0;
5325	  else if (src_folded && GET_CODE (src_folded) == code
5326		   && rtx_equal_p (src_folded, p->exp))
5327	    src_folded = 0;
5328	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5329		   && rtx_equal_p (src_eqv_here, p->exp))
5330	    src_eqv_here = 0;
5331	  else if (src_related && GET_CODE (src_related) == code
5332		   && rtx_equal_p (src_related, p->exp))
5333	    src_related = 0;
5334
5335	  /* If this is the same as the destination of the insn, we want
5336	     to prefer it.  Copy it to src_related.  The code below will
5337	     then give it a negative cost.  */
5338	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5339	    src_related = dest;
5340	}
5341
5342      /* Find the cheapest valid equivalent, trying all the available
5343         possibilities.  Prefer items not in the hash table to ones
5344         that are, when they are of equal cost.  Note that we can never
5345         worsen an insn as the current contents will also succeed.
5346	 If we find an equivalent identical to the destination, use it as best,
5347	 since this insn will probably be eliminated in that case.  */
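
      /* E.g. (illustrative) if SRC is (plus:SI (reg 100) (const_int 0))
	 and SRC_FOLDED is the cheaper (reg 100), the folded form is the
	 one substituted by the selection loop below.  */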
5348      if (src)
5349	{
5350	  if (rtx_equal_p (src, dest))
5351	    src_cost = src_regcost = -1;
5352	  else
5353	    {
5354	      src_cost = COST (src);
5355	      src_regcost = approx_reg_cost (src);
5356	    }
5357	}
5358
5359      if (src_eqv_here)
5360	{
5361	  if (rtx_equal_p (src_eqv_here, dest))
5362	    src_eqv_cost = src_eqv_regcost = -1;
5363	  else
5364	    {
5365	      src_eqv_cost = COST (src_eqv_here);
5366	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5367	    }
5368	}
5369
5370      if (src_folded)
5371	{
5372	  if (rtx_equal_p (src_folded, dest))
5373	    src_folded_cost = src_folded_regcost = -1;
5374	  else
5375	    {
5376	      src_folded_cost = COST (src_folded);
5377	      src_folded_regcost = approx_reg_cost (src_folded);
5378	    }
5379	}
5380
5381      if (src_related)
5382	{
5383	  if (rtx_equal_p (src_related, dest))
5384	    src_related_cost = src_related_regcost = -1;
5385	  else
5386	    {
5387	      src_related_cost = COST (src_related);
5388	      src_related_regcost = approx_reg_cost (src_related);
5389	    }
5390	}
5391
5392      /* If this was an indirect jump insn, a known label will really be
5393	 cheaper even though it looks more expensive.  */
5394      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5395	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5396
5397      /* Terminate loop when replacement made.  This must terminate since
5398         the current contents will be tested and will always be valid.  */
5399      while (1)
5400	{
5401	  rtx trial;
5402
5403	  /* Skip invalid entries.  */
5404	  while (elt && GET_CODE (elt->exp) != REG
5405		 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5406	    elt = elt->next_same_value;
5407
5408	  /* A paradoxical subreg would be bad here: it'll be the right
5409	     size, but later may be adjusted so that the upper bits aren't
5410	     what we want.  So reject it.  */
5411	  if (elt != 0
5412	      && GET_CODE (elt->exp) == SUBREG
5413	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5414		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5415	      /* It is okay, though, if the rtx we're trying to match
5416		 will ignore any of the bits we can't predict.  */
5417	      && ! (src != 0
5418		    && GET_CODE (src) == SUBREG
5419		    && GET_MODE (src) == GET_MODE (elt->exp)
5420		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5421			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5422	    {
5423	      elt = elt->next_same_value;
5424	      continue;
5425	    }
5426
5427	  if (elt)
5428	    {
5429	      src_elt_cost = elt->cost;
5430	      src_elt_regcost = elt->regcost;
5431	    }
5432
5433	  /* Find the cheapest one and skip it for the next time.  For items
5434	     of equal cost, use this order:
5435	     src_folded, src, src_eqv, src_related and hash table entry.  */
5436	  if (src_folded
5437	      && preferrable (src_folded_cost, src_folded_regcost,
5438			      src_cost, src_regcost) <= 0
5439	      && preferrable (src_folded_cost, src_folded_regcost,
5440			      src_eqv_cost, src_eqv_regcost) <= 0
5441	      && preferrable (src_folded_cost, src_folded_regcost,
5442			      src_related_cost, src_related_regcost) <= 0
5443	      && preferrable (src_folded_cost, src_folded_regcost,
5444			      src_elt_cost, src_elt_regcost) <= 0)
5445	    {
5446	      trial = src_folded, src_folded_cost = MAX_COST;
5447	      if (src_folded_force_flag)
5448		{
5449		  rtx forced = force_const_mem (mode, trial);
5450		  if (forced)
5451		    trial = forced;
5452		}
5453	    }
5454	  else if (src
5455		   && preferrable (src_cost, src_regcost,
5456				   src_eqv_cost, src_eqv_regcost) <= 0
5457		   && preferrable (src_cost, src_regcost,
5458				   src_related_cost, src_related_regcost) <= 0
5459		   && preferrable (src_cost, src_regcost,
5460				   src_elt_cost, src_elt_regcost) <= 0)
5461	    trial = src, src_cost = MAX_COST;
5462	  else if (src_eqv_here
5463		   && preferrable (src_eqv_cost, src_eqv_regcost,
5464				   src_related_cost, src_related_regcost) <= 0
5465		   && preferrable (src_eqv_cost, src_eqv_regcost,
5466				   src_elt_cost, src_elt_regcost) <= 0)
5467	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5468	  else if (src_related
5469		   && preferrable (src_related_cost, src_related_regcost,
5470				   src_elt_cost, src_elt_regcost) <= 0)
5471	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5472	  else
5473	    {
5474	      trial = copy_rtx (elt->exp);
5475	      elt = elt->next_same_value;
5476	      src_elt_cost = MAX_COST;
5477	    }
5478
5479	  /* We don't normally have an insn matching (set (pc) (pc)), so
5480	     check for this separately here.  We will delete such an
5481	     insn below.
5482
5483	     For other cases such as a table jump or conditional jump
5484	     where we know the ultimate target, go ahead and replace the
5485	     operand.  While that may not make a valid insn, we will
5486	     reemit the jump below (and also insert any necessary
5487	     barriers).  */
5488	  if (n_sets == 1 && dest == pc_rtx
5489	      && (trial == pc_rtx
5490		  || (GET_CODE (trial) == LABEL_REF
5491		      && ! condjump_p (insn))))
5492	    {
5493	      SET_SRC (sets[i].rtl) = trial;
5494	      cse_jumps_altered = 1;
5495	      break;
5496	    }
5497
5498	  /* Look for a substitution that makes a valid insn.  */
5499	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5500	    {
5501	      rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5502
5503	      /* If we just made a substitution inside a libcall, then we
5504		 need to make the same substitution in any notes attached
5505		 to the RETVAL insn.  */
5506	      if (libcall_insn
5507		  && (GET_CODE (sets[i].orig_src) == REG
5508		      || GET_CODE (sets[i].orig_src) == SUBREG
5509		      || GET_CODE (sets[i].orig_src) == MEM))
5510		simplify_replace_rtx (REG_NOTES (libcall_insn),
5511				      sets[i].orig_src, copy_rtx (new));
5512
5513	      /* The result of apply_change_group can be ignored; see
5514		 canon_reg.  */
5515
5516	      validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5517	      apply_change_group ();
5518	      break;
5519	    }
5520
5521	  /* If we previously found constant pool entries for
5522	     constants and this is a constant, try making a
5523	     pool entry.  Put it in src_folded unless we have already done
5524	     so, since that is where it likely came from.  */
5525
5526	  else if (constant_pool_entries_cost
5527		   && CONSTANT_P (trial)
5528		   /* Reject cases that will abort in decode_rtx_const.
5529		      On the alpha when simplifying a switch, we get
5530		      (const (truncate (minus (label_ref) (label_ref)))).  */
5531		   && ! (GET_CODE (trial) == CONST
5532			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5533		   /* Likewise on IA-64, except without the truncate.  */
5534		   && ! (GET_CODE (trial) == CONST
5535			 && GET_CODE (XEXP (trial, 0)) == MINUS
5536			 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5537			 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5538		   && (src_folded == 0
5539		       || (GET_CODE (src_folded) != MEM
5540			   && ! src_folded_force_flag))
5541		   && GET_MODE_CLASS (mode) != MODE_CC
5542		   && mode != VOIDmode)
5543	    {
5544	      src_folded_force_flag = 1;
5545	      src_folded = trial;
5546	      src_folded_cost = constant_pool_entries_cost;
5547	      src_folded_regcost = constant_pool_entries_regcost;
5548	    }
5549	}
5550
5551      src = SET_SRC (sets[i].rtl);
5552
5553      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5554	 However, there is an important exception:  If both are registers
5555	 that are not the head of their equivalence class, replace SET_SRC
5556	 with the head of the class.  If we do not do this, we will have
5557	 both registers live over a portion of the basic block.  This way,
5558	 their lifetimes will likely abut instead of overlapping.  */
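      /* Illustrative: if (reg 100) heads the class containing (reg 101)
	 and this insn is now (set (reg 101) (reg 101)), rewriting it as
	 (set (reg 101) (reg 100)) lets the old value of (reg 101) die
	 here instead of staying live alongside (reg 100).  */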
5559      if (GET_CODE (dest) == REG
5560	  && REGNO_QTY_VALID_P (REGNO (dest)))
5561	{
5562	  int dest_q = REG_QTY (REGNO (dest));
5563	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5564
5565	  if (dest_ent->mode == GET_MODE (dest)
5566	      && dest_ent->first_reg != REGNO (dest)
5567	      && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5568	      /* Don't do this if the original insn had a hard reg as
5569		 SET_SRC or SET_DEST.  */
5570	      && (GET_CODE (sets[i].src) != REG
5571		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5572	      && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5573	    /* We can't call canon_reg here because it won't do anything if
5574	       SRC is a hard register.  */
5575	    {
5576	      int src_q = REG_QTY (REGNO (src));
5577	      struct qty_table_elem *src_ent = &qty_table[src_q];
5578	      int first = src_ent->first_reg;
5579	      rtx new_src
5580		= (first >= FIRST_PSEUDO_REGISTER
5581		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5582
5583	      /* We must use validate-change even for this, because this
5584		 might be a special no-op instruction, suitable only to
5585		 tag notes onto.  */
5586	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5587		{
5588		  src = new_src;
5589		  /* If we had a constant that is cheaper than what we are now
5590		     setting SRC to, use that constant.  We ignored it when we
5591		     thought we could make this into a no-op.  */
5592		  if (src_const && COST (src_const) < COST (src)
5593		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5594					  src_const, 0))
5595		    src = src_const;
5596		}
5597	    }
5598	}
5599
5600      /* If we made a change, recompute SRC values.  */
5601      if (src != sets[i].src)
5602	{
5603	  cse_altered = 1;
5604	  do_not_record = 0;
5605	  hash_arg_in_memory = 0;
5606	  sets[i].src = src;
5607	  sets[i].src_hash = HASH (src, mode);
5608	  sets[i].src_volatile = do_not_record;
5609	  sets[i].src_in_memory = hash_arg_in_memory;
5610	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5611	}
5612
5613      /* If this is a single SET, we are setting a register, and we have an
5614	 equivalent constant, we want to add a REG_NOTE.  We don't want
5615	 to write a REG_EQUAL note for a constant pseudo since verifying that
5616	 that pseudo hasn't been eliminated is a pain.  Such a note also
5617	 won't help anything.
5618
5619	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5620	 which can be created for a reference to a compile time computable
5621	 entry in a jump table.  */
5622
5623      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5624	  && GET_CODE (src_const) != REG
5625	  && ! (GET_CODE (src_const) == CONST
5626		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5627		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5628		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5629	{
5630	  /* We only want a REG_EQUAL note if src_const != src.  */
5631	  if (! rtx_equal_p (src, src_const))
5632	    {
5633	      /* Make sure that the rtx is not shared.  */
5634	      src_const = copy_rtx (src_const);
5635
5636	      /* Record the actual constant value in a REG_EQUAL note,
5637		 making a new one if one does not already exist.  */
5638	      set_unique_reg_note (insn, REG_EQUAL, src_const);
5639	    }
5640	}
5641
5642      /* Now deal with the destination.  */
5643      do_not_record = 0;
5644
5645      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5646	 to the MEM or REG within it.  */
5647      while (GET_CODE (dest) == SIGN_EXTRACT
5648	     || GET_CODE (dest) == ZERO_EXTRACT
5649	     || GET_CODE (dest) == SUBREG
5650	     || GET_CODE (dest) == STRICT_LOW_PART)
5651	dest = XEXP (dest, 0);
5652
5653      sets[i].inner_dest = dest;
5654
5655      if (GET_CODE (dest) == MEM)
5656	{
5657#ifdef PUSH_ROUNDING
5658	  /* Stack pushes invalidate the stack pointer.  */
5659	  rtx addr = XEXP (dest, 0);
5660	  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5661	      && XEXP (addr, 0) == stack_pointer_rtx)
5662	    invalidate (stack_pointer_rtx, Pmode);
5663#endif
5664	  dest = fold_rtx (dest, insn);
5665	}
5666
5667      /* Compute the hash code of the destination now,
5668	 before the effects of this instruction are recorded,
5669	 since the register values used in the address computation
5670	 are those before this instruction.  */
5671      sets[i].dest_hash = HASH (dest, mode);
5672
5673      /* Don't enter a bit-field in the hash table
5674	 because the value in it after the store
5675	 may not equal what was stored, due to truncation.  */
5676
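      /* E.g. (illustrative) storing (const_int 5) into a 2-bit
	 ZERO_EXTRACT leaves 1 in the field, so recording the field as
	 equal to 5 would be wrong; a constant that fits, such as
	 (const_int 1), is safe to record.  */
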
5677      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5678	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5679	{
5680	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5681
5682	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5683	      && GET_CODE (width) == CONST_INT
5684	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5685	      && ! (INTVAL (src_const)
5686		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5687	    /* Exception: if the value is constant,
5688	       and it won't be truncated, record it.  */
5689	    ;
5690	  else
5691	    {
5692	      /* This is chosen so that the destination will be invalidated
5693		 but no new value will be recorded.
5694		 We must invalidate because sometimes constant
5695		 values can be recorded for bitfields.  */
5696	      sets[i].src_elt = 0;
5697	      sets[i].src_volatile = 1;
5698	      src_eqv = 0;
5699	      src_eqv_elt = 0;
5700	    }
5701	}
5702
5703      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5704	 the insn.  */
5705      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5706	{
5707	  /* One less use of the label this insn used to jump to.  */
5708	  delete_insn (insn);
5709	  cse_jumps_altered = 1;
5710	  /* No more processing for this set.  */
5711	  sets[i].rtl = 0;
5712	}
5713
5714      /* If this SET is now setting PC to a label, we know it used to
5715	 be a conditional or computed branch.  */
5716      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5717	{
5718	  /* Now emit a BARRIER after the unconditional jump.  */
5719	  if (NEXT_INSN (insn) == 0
5720	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5721	    emit_barrier_after (insn);
5722
5723	  /* We reemit the jump in as many cases as possible just in
5724	     case the form of an unconditional jump is significantly
5725	     different than a computed jump or conditional jump.
5726
5727	     If this insn has multiple sets, then reemitting the
5728	     jump is nontrivial.  So instead we just force rerecognition
5729	     and hope for the best.  */
5730	  if (n_sets == 1)
5731	    {
5732	      rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5733
5734	      JUMP_LABEL (new) = XEXP (src, 0);
5735	      LABEL_NUSES (XEXP (src, 0))++;
5736	      delete_insn (insn);
5737	      insn = new;
5738
5739	      /* Now emit a BARRIER after the unconditional jump.  */
5740	      if (NEXT_INSN (insn) == 0
5741		  || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5742		emit_barrier_after (insn);
5743	    }
5744	  else
5745	    INSN_CODE (insn) = -1;
5746
5747	  never_reached_warning (insn, NULL);
5748
5749	  /* Do not bother deleting any unreachable code,
5750	     let jump/flow do that.  */
5751
5752	  cse_jumps_altered = 1;
5753	  sets[i].rtl = 0;
5754	}
5755
5756      /* If destination is volatile, invalidate it and then do no further
5757	 processing for this assignment.  */
5758
5759      else if (do_not_record)
5760	{
5761	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5762	    invalidate (dest, VOIDmode);
5763	  else if (GET_CODE (dest) == MEM)
5764	    {
5765	      /* Outgoing arguments for a libcall don't
5766		 affect any recorded expressions.  */
5767	      if (! libcall_insn || insn == libcall_insn)
5768		invalidate (dest, VOIDmode);
5769	    }
5770	  else if (GET_CODE (dest) == STRICT_LOW_PART
5771		   || GET_CODE (dest) == ZERO_EXTRACT)
5772	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5773	  sets[i].rtl = 0;
5774	}
5775
5776      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5777	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5778
5779#ifdef HAVE_cc0
5780      /* If setting CC0, record what it was set to, or a constant, if it
5781	 is equivalent to a constant.  If it is being set to a floating-point
5782	 value, make a COMPARE with the appropriate constant of 0.  If we
5783	 don't do this, later code can interpret this as a test against
5784	 const0_rtx, which can cause problems if we try to put it into an
5785	 insn as a floating-point operand.  */
5786      if (dest == cc0_rtx)
5787	{
5788	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5789	  this_insn_cc0_mode = mode;
5790	  if (FLOAT_MODE_P (mode))
5791	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5792					     CONST0_RTX (mode));
5793	}
5794#endif
5795    }
5796
5797  /* Now enter all non-volatile source expressions in the hash table
5798     if they are not already present.
5799     Record their equivalence classes in src_elt.
5800     This way we can insert the corresponding destinations into
5801     the same classes even if the actual sources are no longer in them
5802     (having been invalidated).  */
5803
5804  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5805      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5806    {
5807      struct table_elt *elt;
5808      struct table_elt *classp = sets[0].src_elt;
5809      rtx dest = SET_DEST (sets[0].rtl);
5810      enum machine_mode eqvmode = GET_MODE (dest);
5811
5812      if (GET_CODE (dest) == STRICT_LOW_PART)
5813	{
5814	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5815	  classp = 0;
5816	}
5817      if (insert_regs (src_eqv, classp, 0))
5818	{
5819	  rehash_using_reg (src_eqv);
5820	  src_eqv_hash = HASH (src_eqv, eqvmode);
5821	}
5822      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5823      elt->in_memory = src_eqv_in_memory;
5824      src_eqv_elt = elt;
5825
5826      /* Check to see if src_eqv_elt is the same as a set source which
5827	 does not yet have an elt, and if so set the elt of the set source
5828	 to src_eqv_elt.  */
5829      for (i = 0; i < n_sets; i++)
5830	if (sets[i].rtl && sets[i].src_elt == 0
5831	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5832	  sets[i].src_elt = src_eqv_elt;
5833    }
5834
5835  for (i = 0; i < n_sets; i++)
5836    if (sets[i].rtl && ! sets[i].src_volatile
5837	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5838      {
5839	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5840	  {
5841	    /* REG_EQUAL in setting a STRICT_LOW_PART
5842	       gives an equivalent for the entire destination register,
5843	       not just for the subreg being stored in now.
5844	       This is a more interesting equivalence, so we arrange later
5845	       to treat the entire reg as the destination.  */
5846	    sets[i].src_elt = src_eqv_elt;
5847	    sets[i].src_hash = src_eqv_hash;
5848	  }
5849	else
5850	  {
5851	    /* Insert source and constant equivalent into hash table, if not
5852	       already present.  */
5853	    struct table_elt *classp = src_eqv_elt;
5854	    rtx src = sets[i].src;
5855	    rtx dest = SET_DEST (sets[i].rtl);
5856	    enum machine_mode mode
5857	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5858
5859	    /* It's possible that we have a source value known to be
5860	       constant but don't have a REG_EQUAL note on the insn.
5861	       Lack of a note will mean src_eqv_elt will be NULL.  This
5862	       can happen where we've generated a SUBREG to access a
5863	       CONST_INT that is already in a register in a wider mode.
5864	       Ensure that the source expression is put in the proper
5865	       constant class.  */
5866	    if (!classp)
5867	      classp = sets[i].src_const_elt;
5868
5869	    if (sets[i].src_elt == 0)
5870	      {
5871		/* Don't put a hard register source into the table if this is
5872		   the last insn of a libcall.  In this case, we only need
5873		   to put src_eqv_elt in src_elt.  */
5874		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5875		  {
5876		    struct table_elt *elt;
5877
5878		    /* Note that these insert_regs calls cannot remove
5879		       any of the src_elt's, because they would have failed to
5880		       match if not still valid.  */
5881		    if (insert_regs (src, classp, 0))
5882		      {
5883			rehash_using_reg (src);
5884			sets[i].src_hash = HASH (src, mode);
5885		      }
5886		    elt = insert (src, classp, sets[i].src_hash, mode);
5887		    elt->in_memory = sets[i].src_in_memory;
5888		    sets[i].src_elt = classp = elt;
5889		  }
5890		else
5891		  sets[i].src_elt = classp;
5892	      }
5893	    if (sets[i].src_const && sets[i].src_const_elt == 0
5894		&& src != sets[i].src_const
5895		&& ! rtx_equal_p (sets[i].src_const, src))
5896	      sets[i].src_elt = insert (sets[i].src_const, classp,
5897					sets[i].src_const_hash, mode);
5898	  }
5899      }
5900    else if (sets[i].src_elt == 0)
5901      /* If we did not insert the source into the hash table (e.g., it was
5902	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5903	 so that the destination goes into that class.  */
5904      sets[i].src_elt = src_eqv_elt;
5905
5906  invalidate_from_clobbers (x);
5907
5908  /* Some registers are invalidated by subroutine calls.  Memory is
5909     invalidated by non-constant calls.  */
5910
5911  if (GET_CODE (insn) == CALL_INSN)
5912    {
5913      if (! CONST_OR_PURE_CALL_P (insn))
5914	invalidate_memory ();
5915      invalidate_for_call ();
5916    }
5917
5918  /* Now invalidate everything set by this instruction.
5919     If a SUBREG or other funny destination is being set,
5920     sets[i].rtl is still nonzero, so here we invalidate the reg
5921     a part of which is being set.  */
5922
5923  for (i = 0; i < n_sets; i++)
5924    if (sets[i].rtl)
5925      {
5926	/* We can't use the inner dest, because the mode associated with
5927	   a ZERO_EXTRACT is significant.  */
5928	rtx dest = SET_DEST (sets[i].rtl);
5929
5930	/* Needed for registers to remove the register from its
5931	   previous quantity's chain.
5932	   Needed for memory if this is a nonvarying address, unless
5933	   we have just done an invalidate_memory that covers even those.  */
5934	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5935	  invalidate (dest, VOIDmode);
5936	else if (GET_CODE (dest) == MEM)
5937	  {
5938	    /* Outgoing arguments for a libcall don't
5939	       affect any recorded expressions.  */
5940	    if (! libcall_insn || insn == libcall_insn)
5941	      invalidate (dest, VOIDmode);
5942	  }
5943	else if (GET_CODE (dest) == STRICT_LOW_PART
5944		 || GET_CODE (dest) == ZERO_EXTRACT)
5945	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5946      }
5947
5948  /* A volatile ASM invalidates everything.  */
5949  if (GET_CODE (insn) == INSN
5950      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5951      && MEM_VOLATILE_P (PATTERN (insn)))
5952    flush_hash_table ();
5953
5954  /* Make sure registers mentioned in destinations
5955     are safe for use in an expression to be inserted.
5956     This removes from the hash table
5957     any invalid entry that refers to one of these registers.
5958
5959     We don't care about the return value from mention_regs because
5960     we are going to hash the SET_DEST values unconditionally.  */
5961
5962  for (i = 0; i < n_sets; i++)
5963    {
5964      if (sets[i].rtl)
5965	{
5966	  rtx x = SET_DEST (sets[i].rtl);
5967
5968	  if (GET_CODE (x) != REG)
5969	    mention_regs (x);
5970	  else
5971	    {
5972	      /* We used to rely on all references to a register becoming
5973		 inaccessible when a register changes to a new quantity,
5974		 since that changes the hash code.  However, that is not
5975		 safe, since after HASH_SIZE new quantities we get a
5976		 hash 'collision' of a register with its own invalid
5977		 entries.  And since SUBREGs have been changed not to
5978		 change their hash code with the hash code of the register,
5979		 it wouldn't work any longer at all.  So we have to check
5980		 for any invalid references lying around now.
5981		 This code is similar to the REG case in mention_regs,
5982		 but it knows that reg_tick has been incremented, and
5983		 it leaves reg_in_table as -1.  */
5984	      unsigned int regno = REGNO (x);
5985	      unsigned int endregno
5986		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5987			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5988	      unsigned int i;
5989
5990	      for (i = regno; i < endregno; i++)
5991		{
5992		  if (REG_IN_TABLE (i) >= 0)
5993		    {
5994		      remove_invalid_refs (i);
5995		      REG_IN_TABLE (i) = -1;
5996		    }
5997		}
5998	    }
5999	}
6000    }
6001
6002  /* We may have just removed some of the src_elt's from the hash table.
6003     So replace each one with the current head of the same class.  */
6004
6005  for (i = 0; i < n_sets; i++)
6006    if (sets[i].rtl)
6007      {
6008	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6009	  /* If elt was removed, find current head of same class,
6010	     or 0 if nothing remains of that class.  */
6011	  {
6012	    struct table_elt *elt = sets[i].src_elt;
6013
6014	    while (elt && elt->prev_same_value)
6015	      elt = elt->prev_same_value;
6016
6017	    while (elt && elt->first_same_value == 0)
6018	      elt = elt->next_same_value;
6019	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6020	  }
6021      }
6022
6023  /* Now insert the destinations into their equivalence classes.  */
6024
6025  for (i = 0; i < n_sets; i++)
6026    if (sets[i].rtl)
6027      {
6028	rtx dest = SET_DEST (sets[i].rtl);
6029	rtx inner_dest = sets[i].inner_dest;
6030	struct table_elt *elt;
6031
6032	/* Don't record value if we are not supposed to risk allocating
6033	   floating-point values in registers that might be wider than
6034	   memory.  */
6035	if ((flag_float_store
6036	     && GET_CODE (dest) == MEM
6037	     && FLOAT_MODE_P (GET_MODE (dest)))
6038	    /* Don't record BLKmode values, because we don't know the
6039	       size of it, and can't be sure that other BLKmode values
6040	       have the same or smaller size.  */
6041	    || GET_MODE (dest) == BLKmode
6042	    /* Don't record values of destinations set inside a libcall block
6043	       since we might delete the libcall.  Things should have been set
6044	       up so we won't want to reuse such a value, but we play it safe
6045	       here.  */
6046	    || libcall_insn
6047	    /* If we didn't put a REG_EQUAL value or a source into the hash
6048	       table, there is no point is recording DEST.  */
6049	    || sets[i].src_elt == 0
6050	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6051	       or SIGN_EXTEND, don't record DEST since it can cause
6052	       some tracking to be wrong.
6053
6054	       ??? Think about this more later.  */
6055	    || (GET_CODE (dest) == SUBREG
6056		&& (GET_MODE_SIZE (GET_MODE (dest))
6057		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6058		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6059		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6060	  continue;
6061
6062	/* STRICT_LOW_PART isn't part of the value BEING set,
6063	   and neither is the SUBREG inside it.
6064	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6065	if (GET_CODE (dest) == STRICT_LOW_PART)
6066	  dest = SUBREG_REG (XEXP (dest, 0));
6067
6068	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6069	  /* Registers must also be inserted into chains for quantities.  */
6070	  if (insert_regs (dest, sets[i].src_elt, 1))
6071	    {
6072	      /* If `insert_regs' changes something, the hash code must be
6073		 recalculated.  */
6074	      rehash_using_reg (dest);
6075	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6076	    }
6077
6078	if (GET_CODE (inner_dest) == MEM
6079	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6080	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6081	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
6082	     Consider the case in which the address of the MEM is
6083	     passed to a function, which alters the MEM.  Then, if we
6084	     later use Y instead of the MEM we'll miss the update.  */
6085	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6086	else
6087	  elt = insert (dest, sets[i].src_elt,
6088			sets[i].dest_hash, GET_MODE (dest));
6089
6090	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6091			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6092			      || fixed_base_plus_p (XEXP (sets[i].inner_dest,
6093							  0))));
6094
6095	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6096	   narrower than M2, and both M1 and M2 are the same number of words,
6097	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6098	   make that equivalence as well.
6099
6100	   However, BAR may have equivalences for which gen_lowpart_if_possible
6101	   will produce a simpler value than gen_lowpart_if_possible applied to
6102	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6103	   BAR's equivalences.  If we don't get a simplified form, make
6104	   the SUBREG.  It will not be used in an equivalence, but will
6105	   cause two similar assignments to be detected.
6106
6107	   Note the loop below will find SUBREG_REG (DEST) since we have
6108	   already entered SRC and DEST of the SET in the table.  */
6109
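	/* Illustrative instance: for
	   (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) with both modes
	   within one word, we also record
	   (reg:HI 100) == (subreg:HI (reg:SI 101) 0), after scanning
	   (reg:SI 101)'s equivalence class for a simpler lowpart.  */
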
6110	if (GET_CODE (dest) == SUBREG
6111	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6112		 / UNITS_PER_WORD)
6113		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6114	    && (GET_MODE_SIZE (GET_MODE (dest))
6115		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6116	    && sets[i].src_elt != 0)
6117	  {
6118	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6119	    struct table_elt *elt, *classp = 0;
6120
6121	    for (elt = sets[i].src_elt->first_same_value; elt;
6122		 elt = elt->next_same_value)
6123	      {
6124		rtx new_src = 0;
6125		unsigned src_hash;
6126		struct table_elt *src_elt;
6127		int byte = 0;
6128
6129		/* Ignore invalid entries.  */
6130		if (GET_CODE (elt->exp) != REG
6131		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6132		  continue;
6133
6134		/* We may have already been playing subreg games.  If the
6135		   mode is already correct for the destination, use it.  */
6136		if (GET_MODE (elt->exp) == new_mode)
6137		  new_src = elt->exp;
6138		else
6139		  {
6140		    /* Calculate big endian correction for the SUBREG_BYTE.
6141		       We have already checked that M1 (GET_MODE (dest))
6142		       is not narrower than M2 (new_mode).  */
6143		    if (BYTES_BIG_ENDIAN)
6144		      byte = (GET_MODE_SIZE (GET_MODE (dest))
6145			      - GET_MODE_SIZE (new_mode));
6146
6147		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6148					           GET_MODE (dest), byte);
6149		  }
6150
6151		/* The call to simplify_gen_subreg fails if the value
6152		   is VOIDmode and no simplification can be done, e.g.
6153		   for EXPR_LISTs denoting function call results.
6154		   It is invalid to construct a SUBREG with a VOIDmode
6155		   SUBREG_REG, hence a zero new_src means we can't do
6156		   this substitution.  */
6157		if (! new_src)
6158		  continue;
6159
6160		src_hash = HASH (new_src, new_mode);
6161		src_elt = lookup (new_src, src_hash, new_mode);
6162
6163		/* Put the new source in the hash table if it isn't
6164		   there already.  */
6165		if (src_elt == 0)
6166		  {
6167		    if (insert_regs (new_src, classp, 0))
6168		      {
6169			rehash_using_reg (new_src);
6170			src_hash = HASH (new_src, new_mode);
6171		      }
6172		    src_elt = insert (new_src, classp, src_hash, new_mode);
6173		    src_elt->in_memory = elt->in_memory;
6174		  }
6175		else if (classp && classp != src_elt->first_same_value)
6176		  /* Show that two things that we've seen before are
6177		     actually the same.  */
6178		  merge_equiv_classes (src_elt, classp);
6179
6180		classp = src_elt->first_same_value;
6181		/* Ignore invalid entries.  */
6182		while (classp
6183		       && GET_CODE (classp->exp) != REG
6184		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6185		  classp = classp->next_same_value;
6186	      }
6187	  }
6188      }
6189
6190  /* Special handling for (set REG0 REG1) where REG0 is the
6191     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6192     be used in the sequel, so (if easily done) change this insn to
6193     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6194     that computed their value.  Then REG1 will become a dead store
6195     and won't cloud the situation for later optimizations.
6196
6197     Do not make this change if REG1 is a hard register, because it will
6198     then be used in the sequel and we may be changing a two-operand insn
6199     into a three-operand insn.
6200
6201     Also do not do this if we are operating on a copy of INSN.
6202
6203     Also don't do this if INSN ends a libcall; this would cause an unrelated
6204     register to be set in the middle of a libcall, and we then get bad code
6205     if the libcall is deleted.  */
6206
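  /* Illustrative: given
	(set (reg 101) (plus (reg 102) (reg 103)))
	(set (reg 100) (reg 101))
     with (reg 100) the cheapest member of the class, this becomes
	(set (reg 100) (plus (reg 102) (reg 103)))
	(set (reg 101) (reg 100))
     and the copy into (reg 101) can later be deleted as a dead store.  */
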
6207  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6208      && NEXT_INSN (PREV_INSN (insn)) == insn
6209      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6210      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6211      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6212    {
6213      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6214      struct qty_table_elem *src_ent = &qty_table[src_q];
6215
6216      if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6217	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6218	{
6219	  rtx prev = insn;
6220	  /* Scan for the previous nonnote insn, but stop at a basic
6221	     block boundary.  */
6222	  do
6223	    {
6224	      prev = PREV_INSN (prev);
6225	    }
6226	  while (prev && GET_CODE (prev) == NOTE
6227		 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6228
6229	  /* Do not swap the registers around if the previous instruction
6230	     attaches a REG_EQUIV note to REG1.
6231
6232	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6233	     from the pseudo that originally shadowed an incoming argument
6234	     to another register.  Some uses of REG_EQUIV might rely on it
6235	     being attached to REG1 rather than REG2.
6236
6237	     This section previously turned the REG_EQUIV into a REG_EQUAL
6238	     note.  We cannot do that because REG_EQUIV may provide an
6239	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
6240
6241	  if (prev != 0 && GET_CODE (prev) == INSN
6242	      && GET_CODE (PATTERN (prev)) == SET
6243	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6244	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6245	    {
6246	      rtx dest = SET_DEST (sets[0].rtl);
6247	      rtx src = SET_SRC (sets[0].rtl);
6248	      rtx note;
6249
6250	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6251	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6252	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6253	      apply_change_group ();
6254
6255	      /* If INSN has a REG_EQUAL note, and this note mentions
6256		 REG0, then we must delete it, because the value in
6257		 REG0 has changed.  If the note's value is REG1, we must
6258		 also delete it because that is now this insn's dest.  */
6259	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6260	      if (note != 0
6261		  && (reg_mentioned_p (dest, XEXP (note, 0))
6262		      || rtx_equal_p (src, XEXP (note, 0))))
6263		remove_note (insn, note);
6264	    }
6265	}
6266    }
6267
6268  /* If this is a conditional jump insn, record any known equivalences due to
6269     the condition being tested.  */
6270
6271  last_jump_equiv_class = 0;
6272  if (GET_CODE (insn) == JUMP_INSN
6273      && n_sets == 1 && GET_CODE (x) == SET
6274      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6275    record_jump_equiv (insn, 0);
6276
6277#ifdef HAVE_cc0
6278  /* If the previous insn set CC0 and this insn no longer references CC0,
6279     delete the previous insn.  Here we use the fact that nothing expects CC0
6280     to be valid over an insn, which is true until the final pass.  */
6281  if (prev_insn && GET_CODE (prev_insn) == INSN
6282      && (tem = single_set (prev_insn)) != 0
6283      && SET_DEST (tem) == cc0_rtx
6284      && ! reg_mentioned_p (cc0_rtx, x))
6285    delete_insn (prev_insn);
6286
6287  prev_insn_cc0 = this_insn_cc0;
6288  prev_insn_cc0_mode = this_insn_cc0_mode;
6289  prev_insn = insn;
6290#endif
6291}
6292
6293/* Remove from the hash table all expressions that reference memory.  */
6294
6295static void
6296invalidate_memory (void)
6297{
6298  int i;
6299  struct table_elt *p, *next;
6300
6301  for (i = 0; i < HASH_SIZE; i++)
6302    for (p = table[i]; p; p = next)
6303      {
6304	next = p->next_same_hash;
6305	if (p->in_memory)
6306	  remove_from_table (p, i);
6307      }
6308}
6309
6310/* If ADDR is an address that implicitly affects the stack pointer, return
6311   1 and update the register tables to show the effect.  Else, return 0.  */
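
/* E.g. (pre_dec (reg:SI sp)) generated for a push: such autoincrement
   addresses form the RTX class 'a' tested below.  (Illustrative.)  */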
6312
6313static int
6314addr_affects_sp_p (rtx addr)
6315{
6316  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6317      && GET_CODE (XEXP (addr, 0)) == REG
6318      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6319    {
6320      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6321	{
6322	  REG_TICK (STACK_POINTER_REGNUM)++;
6323	  /* Is it possible to use a subreg of SP?  */
6324	  SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6325	}
6326
6327      /* This should be *very* rare.  */
6328      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6329	invalidate (stack_pointer_rtx, VOIDmode);
6330
6331      return 1;
6332    }
6333
6334  return 0;
6335}
6336
6337/* Perform invalidation on the basis of everything about an insn
6338   except for invalidating the actual places that are SET in it.
6339   This includes the places CLOBBERed, and anything that might
6340   alias with something that is SET or CLOBBERed.
6341
6342   X is the pattern of the insn.  */
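
/* For instance (illustrative), a pattern
   (parallel [(set (reg 100) ...) (clobber (reg:CC 17))])
   reaches this function so that the recorded equivalences involving
   the clobbered CC register are dropped; the SET destination itself
   is invalidated elsewhere.  */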
6343
6344static void
6345invalidate_from_clobbers (rtx x)
6346{
6347  if (GET_CODE (x) == CLOBBER)
6348    {
6349      rtx ref = XEXP (x, 0);
6350      if (ref)
6351	{
6352	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6353	      || GET_CODE (ref) == MEM)
6354	    invalidate (ref, VOIDmode);
6355	  else if (GET_CODE (ref) == STRICT_LOW_PART
6356		   || GET_CODE (ref) == ZERO_EXTRACT)
6357	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6358	}
6359    }
6360  else if (GET_CODE (x) == PARALLEL)
6361    {
6362      int i;
6363      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6364	{
6365	  rtx y = XVECEXP (x, 0, i);
6366	  if (GET_CODE (y) == CLOBBER)
6367	    {
6368	      rtx ref = XEXP (y, 0);
6369	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6370		  || GET_CODE (ref) == MEM)
6371		invalidate (ref, VOIDmode);
6372	      else if (GET_CODE (ref) == STRICT_LOW_PART
6373		       || GET_CODE (ref) == ZERO_EXTRACT)
6374		invalidate (XEXP (ref, 0), GET_MODE (ref));
6375	    }
6376	}
6377    }
6378}
6379
6380/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6381   and replace any registers in them with either an equivalent constant
6382   or the canonical form of the register.  If we are inside an address,
6383   only do this if the address remains valid.
6384
6385   OBJECT is 0 except when within a MEM in which case it is the MEM.
6386
6387   Return the replacement for X.  */
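
/* E.g. (illustrative) a REG_EQUAL note of (plus (reg 100) (const_int 4))
   may become (plus (reg 90) (const_int 4)) when (reg 90) is the
   canonical equivalent of (reg 100), or a plain constant when one is
   known.  */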
6388
6389static rtx
6390cse_process_notes (rtx x, rtx object)
6391{
6392  enum rtx_code code = GET_CODE (x);
6393  const char *fmt = GET_RTX_FORMAT (code);
6394  int i;
6395
6396  switch (code)
6397    {
6398    case CONST_INT:
6399    case CONST:
6400    case SYMBOL_REF:
6401    case LABEL_REF:
6402    case CONST_DOUBLE:
6403    case CONST_VECTOR:
6404    case PC:
6405    case CC0:
6406    case LO_SUM:
6407      return x;
6408
6409    case MEM:
6410      validate_change (x, &XEXP (x, 0),
6411		       cse_process_notes (XEXP (x, 0), x), 0);
6412      return x;
6413
6414    case EXPR_LIST:
6415    case INSN_LIST:
6416      if (REG_NOTE_KIND (x) == REG_EQUAL)
6417	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6418      if (XEXP (x, 1))
6419	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6420      return x;
6421
6422    case SIGN_EXTEND:
6423    case ZERO_EXTEND:
6424    case SUBREG:
6425      {
6426	rtx new = cse_process_notes (XEXP (x, 0), object);
6427	/* We don't substitute VOIDmode constants into these rtx,
6428	   since they would impede folding.  */
6429	if (GET_MODE (new) != VOIDmode)
6430	  validate_change (object, &XEXP (x, 0), new, 0);
6431	return x;
6432      }
6433
6434    case REG:
6435      i = REG_QTY (REGNO (x));
6436
6437      /* Return a constant or a constant register.  */
6438      if (REGNO_QTY_VALID_P (REGNO (x)))
6439	{
6440	  struct qty_table_elem *ent = &qty_table[i];
6441
6442	  if (ent->const_rtx != NULL_RTX
6443	      && (CONSTANT_P (ent->const_rtx)
6444		  || GET_CODE (ent->const_rtx) == REG))
6445	    {
6446	      rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6447	      if (new)
6448		return new;
6449	    }
6450	}
6451
6452      /* Otherwise, canonicalize this register.  */
6453      return canon_reg (x, NULL_RTX);
6454
6455    default:
6456      break;
6457    }
6458
6459  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6460    if (fmt[i] == 'e')
6461      validate_change (object, &XEXP (x, i),
6462		       cse_process_notes (XEXP (x, i), object), 0);
6463
6464  return x;
6465}
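
/* Sketch of the effect (illustrative): if (reg:SI 106) is currently
   known to hold (const_int 4), a note such as

       (expr_list:REG_EQUAL (plus:SI (reg:SI 106) (reg:SI 107)) ...)

   has the register reference rewritten to (const_int 4) (and reg 107
   replaced by its canonical equivalent), so that identical REG_EQUAL
   notes end up in a common form and match more often.  */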
6466
6467/* Find common subexpressions between the end test of a loop and the beginning
6468   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
6469
6470   Often we have a loop where an expression in the exit test is used
6471   in the body of the loop.  For example "while (*p) *q++ = *p++;".
6472   Because of the way we duplicate the loop exit test in front of the loop,
6473   however, we don't detect that common subexpression.  This will be caught
6474   when global cse is implemented, but this is a quite common case.
6475
6476   This function handles the most common cases of these common expressions.
6477   It is called after we have processed the basic block ending with the
6478   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6479   jumps to a label used only once.  */
6480
6481static void
6482cse_around_loop (rtx loop_start)
6483{
6484  rtx insn;
6485  int i;
6486  struct table_elt *p;
6487
6488  /* Unless LOOP_START is immediately preceded (ignoring line-number
6489     notes) by a NOTE_INSN_LOOP_BEG, this isn't a loop shape we handle.  */
6490  for (insn = PREV_INSN (loop_start);
6491       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6492       insn = PREV_INSN (insn))
6493    ;
6494
6495  if (insn == 0
6496      || GET_CODE (insn) != NOTE
6497      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6498    return;
6499
6500  /* If the last insn of the loop (the end test) was an NE comparison,
6501     we will interpret it as an EQ comparison, since we fell through
6502     the loop.  Any equivalences resulting from that comparison are
6503     therefore not valid and must be invalidated.  */
6504  if (last_jump_equiv_class)
6505    for (p = last_jump_equiv_class->first_same_value; p;
6506	 p = p->next_same_value)
6507      {
6508	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6509	    || (GET_CODE (p->exp) == SUBREG
6510		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
6511	  invalidate (p->exp, VOIDmode);
6512	else if (GET_CODE (p->exp) == STRICT_LOW_PART
6513		 || GET_CODE (p->exp) == ZERO_EXTRACT)
6514	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6515      }
6516
6517  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6518     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6519
6520     The only thing we do with SET_DEST is invalidate entries, so we
6521     can safely process each SET in order.  It is slightly less efficient
6522     to do so, but we only want to handle the most common cases.
6523
6524     The gen_move_insn call in cse_set_around_loop may create new pseudos.
6525     These pseudos won't have valid entries in any of the tables indexed
6526     by register number, such as reg_qty.  We avoid out-of-range array
6527     accesses by not processing any instructions created after cse started.  */
6528
6529  for (insn = NEXT_INSN (loop_start);
6530       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6531       && INSN_UID (insn) < max_insn_uid
6532       && ! (GET_CODE (insn) == NOTE
6533	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6534       insn = NEXT_INSN (insn))
6535    {
6536      if (INSN_P (insn)
6537	  && (GET_CODE (PATTERN (insn)) == SET
6538	      || GET_CODE (PATTERN (insn)) == CLOBBER))
6539	cse_set_around_loop (PATTERN (insn), insn, loop_start);
6540      else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6541	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6542	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6543	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6544	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6545				 loop_start);
6546    }
6547}
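
/* The insn stream shape this expects, roughly (a sketch):

       NOTE_INSN_LOOP_BEG
       CODE_LABEL                       <- LOOP_START
       ... loop body ...
       (jump_insn ... -> LOOP_START)    ; the label's only use
       NOTE_INSN_LOOP_END

   After confirming the NOTE_INSN_LOOP_BEG, the loop above re-examines
   the first insns of the body with cse_set_around_loop.  */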
6548
6549/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6550   since they are done elsewhere.  This function is called via note_stores.  */
6551
6552static void
6553invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6554{
6555  enum rtx_code code = GET_CODE (dest);
6556
6557  if (code == MEM
6558      && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6559      /* There are times when an address can appear varying and be a PLUS
6560	 during this scan when it would be a fixed address were we to know
6561	 the proper equivalences.  So invalidate all memory if there is
6562	 a BLKmode or nonscalar memory reference or a reference to a
6563	 variable address.  */
6564      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6565	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6566    {
6567      invalidate_memory ();
6568      return;
6569    }
6570
6571  if (GET_CODE (set) == CLOBBER
6572      || CC0_P (dest)
6573      || dest == pc_rtx)
6574    return;
6575
6576  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6577    invalidate (XEXP (dest, 0), GET_MODE (dest));
6578  else if (code == REG || code == SUBREG || code == MEM)
6579    invalidate (dest, VOIDmode);
6580}
6581
6582/* Invalidate all insns from START up to the end of the function or the
6583   next label.  This called when we wish to CSE around a block that is
6584   conditionally executed.  */
6585
6586static void
6587invalidate_skipped_block (rtx start)
6588{
6589  rtx insn;
6590
6591  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6592       insn = NEXT_INSN (insn))
6593    {
6594      if (! INSN_P (insn))
6595	continue;
6596
6597      if (GET_CODE (insn) == CALL_INSN)
6598	{
6599	  if (! CONST_OR_PURE_CALL_P (insn))
6600	    invalidate_memory ();
6601	  invalidate_for_call ();
6602	}
6603
6604      invalidate_from_clobbers (PATTERN (insn));
6605      note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6606    }
6607}
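
/* Usage sketch: when the branch path marks a conditional jump with
   status AROUND, the skipped insns are never scanned for equivalences,
   but their side effects must still be distrusted, so cse_basic_block
   calls

       invalidate_skipped_block (NEXT_INSN (insn));

   before continuing at the jump's target.  */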
6608
6609/* If modifying X will modify the value in *DATA (which is really an
6610   `rtx *'), indicate that fact by setting the pointed to value to
6611   NULL_RTX.  */
6612
6613static void
6614cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
6615{
6616  rtx *cse_check_loop_start_value = (rtx *) data;
6617
6618  if (*cse_check_loop_start_value == NULL_RTX
6619      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6620    return;
6621
6622  if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6623      || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6624    *cse_check_loop_start_value = NULL_RTX;
6625}
6626
6627/* X is a SET or CLOBBER contained in INSN that was found near the start of
6628   a loop that starts with the label at LOOP_START.
6629
6630   If X is a SET, we see if its SET_SRC is currently in our hash table.
6631   If so, we see if it has a value equal to some register used only in the
6632   loop exit code (as marked by jump.c).
6633
6634   If those two conditions are true, we search backwards from the start of
6635   the loop to see if that same value was loaded into a register that still
6636   retains its value at the start of the loop.
6637
6638   If so, we insert an insn after the load to copy the destination of that
6639   load into the equivalent register and (try to) replace our SET_SRC with that
6640   register.
6641
6642   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
6643
6644static void
6645cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
6646{
6647  struct table_elt *src_elt;
6648
6649  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6650     are setting PC or CC0 or whose SET_SRC is already a register.  */
6651  if (GET_CODE (x) == SET
6652      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6653      && GET_CODE (SET_SRC (x)) != REG)
6654    {
6655      src_elt = lookup (SET_SRC (x),
6656			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6657			GET_MODE (SET_DEST (x)));
6658
6659      if (src_elt)
6660	for (src_elt = src_elt->first_same_value; src_elt;
6661	     src_elt = src_elt->next_same_value)
6662	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6663	      && COST (src_elt->exp) < COST (SET_SRC (x)))
6664	    {
6665	      rtx p, set;
6666
6667	      /* Look for an insn in front of LOOP_START that sets
6668		 something in the desired mode to SET_SRC (x) before we hit
6669		 a label or CALL_INSN.  */
6670
6671	      for (p = prev_nonnote_insn (loop_start);
6672		   p && GET_CODE (p) != CALL_INSN
6673		   && GET_CODE (p) != CODE_LABEL;
6674		   p = prev_nonnote_insn (p))
6675		if ((set = single_set (p)) != 0
6676		    && GET_CODE (SET_DEST (set)) == REG
6677		    && GET_MODE (SET_DEST (set)) == src_elt->mode
6678		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6679		  {
6680		    /* We now have to ensure that nothing between P
6681		       and LOOP_START modified anything referenced in
6682		       SET_SRC (x).  We know that nothing within the loop
6683		       can modify it, or we would have invalidated it in
6684		       the hash table.  */
6685		    rtx q;
6686		    rtx cse_check_loop_start_value = SET_SRC (x);
6687		    for (q = p; q != loop_start; q = NEXT_INSN (q))
6688		      if (INSN_P (q))
6689			note_stores (PATTERN (q),
6690				     cse_check_loop_start,
6691				     &cse_check_loop_start_value);
6692
6693		    /* If nothing was changed and we can replace our
6694		       SET_SRC, add an insn after P to copy its destination
6695		       to what we will be replacing SET_SRC with.  */
6696		    if (cse_check_loop_start_value
6697			&& single_set (p)
6698			&& !can_throw_internal (insn)
6699			&& validate_change (insn, &SET_SRC (x),
6700					    src_elt->exp, 0))
6701		      {
6702			/* If this creates new pseudos, this is unsafe,
6703			   because the regno of new pseudo is unsuitable
6704			   to index into reg_qty when cse_insn processes
6705			   the new insn.  Therefore, if a new pseudo was
6706			   created, discard this optimization.  */
6707			int nregs = max_reg_num ();
6708			rtx move
6709			  = gen_move_insn (src_elt->exp, SET_DEST (set));
6710			if (nregs != max_reg_num ())
6711			  {
6712			    if (! validate_change (insn, &SET_SRC (x),
6713						   SET_SRC (set), 0))
6714			      abort ();
6715			  }
6716			else
6717			  {
6718			    if (CONSTANT_P (SET_SRC (set))
6719				&& ! find_reg_equal_equiv_note (insn))
6720			      set_unique_reg_note (insn, REG_EQUAL,
6721						   SET_SRC (set));
6722			    if (control_flow_insn_p (p))
6723			      /* p can cause a control flow transfer so it
6724				 is the last insn of a basic block.  We can't
6725				 therefore use emit_insn_after.  */
6726			      emit_insn_before (move, next_nonnote_insn (p));
6727			    else
6728			      emit_insn_after (move, p);
6729			  }
6730		      }
6731		    break;
6732		  }
6733	    }
6734    }
6735
6736  /* Deal with the destination of X affecting the stack pointer.  */
6737  addr_affects_sp_p (SET_DEST (x));
6738
6739  /* See comment on similar code in cse_insn for explanation of these
6740     tests.  */
6741  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6742      || GET_CODE (SET_DEST (x)) == MEM)
6743    invalidate (SET_DEST (x), VOIDmode);
6744  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6745	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6746    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6747}
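
/* Before/after sketch of the replacement performed above (register
   numbers are illustrative):

       before:
         P:  (set (reg 10) (plus (reg a) (reg b)))
         LOOP_START:
         I:  (set (reg 11) (plus (reg a) (reg b)))

       after:
         P:  (set (reg 10) (plus (reg a) (reg b)))
             (set (reg 20) (reg 10))           ; move emitted after P
         LOOP_START:
         I:  (set (reg 11) (reg 20))

   Here (reg 20) is the REG_LOOP_TEST_P register found in the hash
   table, and nothing between P and LOOP_START modifies a or b.  */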
6748
6749/* Find the end of INSN's basic block and record its range,
6750   the total number of SETs in all the insns of the block, the last insn of the
6751   block, and the branch path.
6752
6753   The branch path indicates which branches should be followed.  If a nonzero
6754   path size is specified, the block should be rescanned and a different set
6755   of branches will be taken.  The branch path is only used if
6756   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6757
6758   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6759   used to describe the block.  It is filled in with the information about
6760   the current block.  The incoming structure's branch path, if any, is used
6761   to construct the output branch path.  */
6762
6763void
6764cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6765			int follow_jumps, int after_loop, int skip_blocks)
6766{
6767  rtx p = insn, q;
6768  int nsets = 0;
6769  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6770  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6771  int path_size = data->path_size;
6772  int path_entry = 0;
6773  int i;
6774
6775  /* Update the previous branch path, if any.  If the last branch was
6776     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
6777     shorten the path by one and look at the previous branch.  We know that
6778     at least one branch must have been taken if PATH_SIZE is nonzero.  */
6779  while (path_size > 0)
6780    {
6781      if (data->path[path_size - 1].status != NOT_TAKEN)
6782	{
6783	  data->path[path_size - 1].status = NOT_TAKEN;
6784	  break;
6785	}
6786      else
6787	path_size--;
6788    }
6789
6790  /* If the first instruction is marked with QImode, that means we've
6791     already processed this block.  Our caller will look at DATA->LAST
6792     to figure out where to go next.  We want to return the next block
6793     in the instruction stream, not some branched-to block somewhere
6794     else.  We accomplish this by pretending our called forbid us to
6795     follow jumps, or skip blocks.  */
6796  if (GET_MODE (insn) == QImode)
6797    follow_jumps = skip_blocks = 0;
6798
6799  /* Scan to end of this basic block.  */
6800  while (p && GET_CODE (p) != CODE_LABEL)
6801    {
6802      /* Don't cse out the end of a loop.  This makes a difference
6803	 only for the unusual loops that always execute at least once;
6804	 all other loops have labels there so we will stop in any case.
6805	 Cse'ing out the end of the loop is dangerous because it
6806	 might cause an invariant expression inside the loop
6807	 to be reused after the end of the loop.  This would make it
6808	 hard to move the expression out of the loop in loop.c,
6809	 especially if it is one of several equivalent expressions
6810	 and loop.c would like to eliminate it.
6811
6812	 If we are running after loop.c has finished, we can ignore
6813	 the NOTE_INSN_LOOP_END.  */
6814
6815      if (! after_loop && GET_CODE (p) == NOTE
6816	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6817	break;
6818
6819      /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
6820	 the regs restored by the longjmp come from
6821	 a later time than the setjmp.  */
6822      if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6823	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6824	break;
6825
6826      /* A PARALLEL can have lots of SETs in it,
6827	 especially if it is really an ASM_OPERANDS.  */
6828      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6829	nsets += XVECLEN (PATTERN (p), 0);
6830      else if (GET_CODE (p) != NOTE)
6831	nsets += 1;
6832
6833      /* Ignore insns made by CSE; they cannot affect the boundaries of
6834	 the basic block.  */
6835
6836      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6837	high_cuid = INSN_CUID (p);
6838      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6839	low_cuid = INSN_CUID (p);
6840
6841      /* See if this insn is in our branch path.  If it is and we are to
6842	 take it, do so.  */
6843      if (path_entry < path_size && data->path[path_entry].branch == p)
6844	{
6845	  if (data->path[path_entry].status != NOT_TAKEN)
6846	    p = JUMP_LABEL (p);
6847
6848	  /* Point to next entry in path, if any.  */
6849	  path_entry++;
6850	}
6851
6852      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6853	 was specified, we haven't reached our maximum path length, there are
6854	 insns following the target of the jump, this is the only use of the
6855	 jump label, and the target label is preceded by a BARRIER.
6856
6857	 Alternatively, we can follow the jump if it branches around a
6858	 block of code and there are no other branches into the block.
6859	 In this case invalidate_skipped_block will be called to invalidate any
6860	 registers set in the block when following the jump.  */
6861
6862      else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6863	       && GET_CODE (p) == JUMP_INSN
6864	       && GET_CODE (PATTERN (p)) == SET
6865	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6866	       && JUMP_LABEL (p) != 0
6867	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6868	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6869	{
6870	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6871	    if ((GET_CODE (q) != NOTE
6872		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6873		 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6874		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6875		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6876	      break;
6877
6878	  /* If we ran into a BARRIER, this code is an extension of the
6879	     basic block when the branch is taken.  */
6880	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6881	    {
6882	      /* Don't allow ourselves to keep walking around an
6883		 always-executed loop.  */
6884	      if (next_real_insn (q) == next)
6885		{
6886		  p = NEXT_INSN (p);
6887		  continue;
6888		}
6889
6890	      /* Similarly, don't put a branch in our path more than once.  */
6891	      for (i = 0; i < path_entry; i++)
6892		if (data->path[i].branch == p)
6893		  break;
6894
6895	      if (i != path_entry)
6896		break;
6897
6898	      data->path[path_entry].branch = p;
6899	      data->path[path_entry++].status = TAKEN;
6900
6901	      /* This branch now ends our path.  It was possible that we
6902		 didn't see this branch the last time around (when the
6903		 insn in front of the target was a JUMP_INSN that was
6904		 turned into a no-op).  */
6905	      path_size = path_entry;
6906
6907	      p = JUMP_LABEL (p);
6908	      /* Mark block so we won't scan it again later.  */
6909	      PUT_MODE (NEXT_INSN (p), QImode);
6910	    }
6911	  /* Detect a branch around a block of code.  */
6912	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6913	    {
6914	      rtx tmp;
6915
6916	      if (next_real_insn (q) == next)
6917		{
6918		  p = NEXT_INSN (p);
6919		  continue;
6920		}
6921
6922	      for (i = 0; i < path_entry; i++)
6923		if (data->path[i].branch == p)
6924		  break;
6925
6926	      if (i != path_entry)
6927		break;
6928
6929	      /* This is no_labels_between_p (p, q) with an added check for
6930		 reaching the end of a function (in case Q precedes P).  */
6931	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6932		if (GET_CODE (tmp) == CODE_LABEL)
6933		  break;
6934
6935	      if (tmp == q)
6936		{
6937		  data->path[path_entry].branch = p;
6938		  data->path[path_entry++].status = AROUND;
6939
6940		  path_size = path_entry;
6941
6942		  p = JUMP_LABEL (p);
6943		  /* Mark block so we won't scan it again later.  */
6944		  PUT_MODE (NEXT_INSN (p), QImode);
6945		}
6946	    }
6947	}
6948      p = NEXT_INSN (p);
6949    }
6950
6951  data->low_cuid = low_cuid;
6952  data->high_cuid = high_cuid;
6953  data->nsets = nsets;
6954  data->last = p;
6955
6956  /* If none of the jumps in the path were taken, set our path length to zero
6957     so a rescan won't be done.  */
6958  for (i = path_size - 1; i >= 0; i--)
6959    if (data->path[i].status != NOT_TAKEN)
6960      break;
6961
6962  if (i == -1)
6963    data->path_size = 0;
6964  else
6965    data->path_size = path_size;
6966
6967  /* End the current branch path.  */
6968  data->path[path_size].branch = 0;
6969}
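
/* Sketch of a filled-in branch path for "if (c) x = 1; y = 2;" under
   -fcse-skip-blocks:

       data->path[0].branch = <jump around the "x = 1" block>
       data->path[0].status = AROUND

   cse_basic_block then scans straight through, invalidating whatever
   the skipped block sets instead of recording it.  */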
6970
6971/* Perform cse on the instructions of a function.
6972   F is the first instruction.
6973   NREGS is one plus the highest pseudo-reg number used in the instruction.
6974
6975   AFTER_LOOP is 1 if this is the cse call done after loop optimization
6976   (only if -frerun-cse-after-loop).
6977
6978   Returns 1 if jump_optimize should be redone due to simplifications
6979   in conditional jump instructions.  */
6980
6981int
6982cse_main (rtx f, int nregs, int after_loop, FILE *file)
6983{
6984  struct cse_basic_block_data val;
6985  rtx insn = f;
6986  int i;
6987
6988  val.path = xmalloc (sizeof (struct branch_path)
6989		      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6990
6991  cse_jumps_altered = 0;
6992  recorded_label_ref = 0;
6993  constant_pool_entries_cost = 0;
6994  constant_pool_entries_regcost = 0;
6995  val.path_size = 0;
6996
6997  init_recog ();
6998  init_alias_analysis ();
6999
7000  max_reg = nregs;
7001
7002  max_insn_uid = get_max_uid ();
7003
7004  reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
7005
7006#ifdef LOAD_EXTEND_OP
7007
7008  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
7009     and change the code and mode as appropriate.  */
7010  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7011#endif
7012
7013  /* Reset the counter indicating how many elements have been made
7014     thus far.  */
7015  n_elements_made = 0;
7016
7017  /* Find the largest uid.  */
7018
7019  max_uid = get_max_uid ();
7020  uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7021
7022  /* Compute the mapping from uids to cuids.
7023     CUIDs are numbers assigned to insns, like uids,
7024     except that cuids increase monotonically through the code.
7025     Don't assign cuids to line-number NOTEs, so that the distance in cuids
7026     between two insns is not affected by -g.  */
7027
7028  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7029    {
7030      if (GET_CODE (insn) != NOTE
7031	  || NOTE_LINE_NUMBER (insn) < 0)
7032	INSN_CUID (insn) = ++i;
7033      else
7034	/* Give a line number note the same cuid as preceding insn.  */
7035	INSN_CUID (insn) = i;
7036    }
7037
7038  ggc_push_context ();
7039
7040  /* Loop over basic blocks.
7041     Compute the maximum number of qty's needed for each basic block
7042     (which is 2 for each SET).  */
7043  insn = f;
7044  while (insn)
7045    {
7046      cse_altered = 0;
7047      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7048			      flag_cse_skip_blocks);
7049
7050      /* If this basic block was already processed or has no sets, skip it.  */
7051      if (val.nsets == 0 || GET_MODE (insn) == QImode)
7052	{
7053	  PUT_MODE (insn, VOIDmode);
7054	  insn = (val.last ? NEXT_INSN (val.last) : 0);
7055	  val.path_size = 0;
7056	  continue;
7057	}
7058
7059      cse_basic_block_start = val.low_cuid;
7060      cse_basic_block_end = val.high_cuid;
7061      max_qty = val.nsets * 2;
7062
7063      if (file)
7064	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7065		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7066		 val.nsets);
7067
7068      /* Make MAX_QTY bigger to give us room to optimize
7069	 past the end of this basic block, if that should prove useful.  */
7070      if (max_qty < 500)
7071	max_qty = 500;
7072
7073      max_qty += max_reg;
7074
7075      /* If this basic block is being extended by following certain jumps,
7076         (see `cse_end_of_basic_block'), we reprocess the code from the start.
7077         Otherwise, we start after this basic block.  */
7078      if (val.path_size > 0)
7079	cse_basic_block (insn, val.last, val.path, 0);
7080      else
7081	{
7082	  int old_cse_jumps_altered = cse_jumps_altered;
7083	  rtx temp;
7084
7085	  /* When cse changes a conditional jump to an unconditional
7086	     jump, we want to reprocess the block, since it will give
7087	     us a new branch path to investigate.  */
7088	  cse_jumps_altered = 0;
7089	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7090	  if (cse_jumps_altered == 0
7091	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7092	    insn = temp;
7093
7094	  cse_jumps_altered |= old_cse_jumps_altered;
7095	}
7096
7097      if (cse_altered)
7098	ggc_collect ();
7099
7100#ifdef USE_C_ALLOCA
7101      alloca (0);
7102#endif
7103    }
7104
7105  ggc_pop_context ();
7106
7107  if (max_elements_made < n_elements_made)
7108    max_elements_made = n_elements_made;
7109
7110  /* Clean up.  */
7111  end_alias_analysis ();
7112  free (uid_cuid);
7113  free (reg_eqv_table);
7114  free (val.path);
7115
7116  return cse_jumps_altered || recorded_label_ref;
7117}
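
/* Call sketch (the exact call site varies between versions), as from
   rest_of_compilation:

       tem = cse_main (get_insns (), max_reg_num (), 0, rtl_dump_file);

   A nonzero return value asks the caller to rerun the jump optimizer,
   either because conditional jumps were simplified or because a
   LABEL_REF was recorded.  */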
7118
7119/* Process a single basic block.  FROM and TO are the limits of the basic
7120   block.  NEXT_BRANCH points to the branch path when following jumps or
7121   a null path when not following jumps.
7122
7123   AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7124   loop.  This is true when we are being called for the last time on a
7125   block and this CSE pass is before loop.c.  */
7126
7127static rtx
7128cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
7129		 int around_loop)
7130{
7131  rtx insn;
7132  int to_usage = 0;
7133  rtx libcall_insn = NULL_RTX;
7134  int num_insns = 0;
7135  int no_conflict = 0;
7136
7137  /* This array is undefined before max_reg, so only allocate
7138     the space actually needed and adjust the start.  */
7139
7140  qty_table = xmalloc ((max_qty - max_reg) * sizeof (struct qty_table_elem));
7141  qty_table -= max_reg;
7142
7143  new_basic_block ();
7144
7145  /* TO might be a label.  If so, protect it from being deleted.  */
7146  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7147    ++LABEL_NUSES (to);
7148
7149  for (insn = from; insn != to; insn = NEXT_INSN (insn))
7150    {
7151      enum rtx_code code = GET_CODE (insn);
7152
7153      /* If we have processed 1,000 insns, flush the hash table to
7154	 avoid extreme quadratic behavior.  We must not include NOTEs
7155	 in the count since there may be more of them when generating
7156	 debugging information.  If we clear the table at different
7157	 times, code generated with -g -O might be different than code
7158	 generated with -O but not -g.
7159
7160	 ??? This is a real kludge and needs to be done some other way.
7161	 Perhaps for 2.9.  */
7162      if (code != NOTE && num_insns++ > 1000)
7163	{
7164	  flush_hash_table ();
7165	  num_insns = 0;
7166	}
7167
7168      /* See if this is a branch that is part of the path.  If so, and it is
7169	 to be taken, do so.  */
7170      if (next_branch->branch == insn)
7171	{
7172	  enum taken status = next_branch++->status;
7173	  if (status != NOT_TAKEN)
7174	    {
7175	      if (status == TAKEN)
7176		record_jump_equiv (insn, 1);
7177	      else
7178		invalidate_skipped_block (NEXT_INSN (insn));
7179
7180	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7181		 Then follow this branch.  */
7182#ifdef HAVE_cc0
7183	      prev_insn_cc0 = 0;
7184	      prev_insn = insn;
7185#endif
7186	      insn = JUMP_LABEL (insn);
7187	      continue;
7188	    }
7189	}
7190
7191      if (GET_MODE (insn) == QImode)
7192	PUT_MODE (insn, VOIDmode);
7193
7194      if (GET_RTX_CLASS (code) == 'i')
7195	{
7196	  rtx p;
7197
7198	  /* Process notes first so we have all notes in canonical forms when
7199	     looking for duplicate operations.  */
7200
7201	  if (REG_NOTES (insn))
7202	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7203
7204	  /* Track when we are inside a LIBCALL block.  Inside such a block,
7205	     we do not want to record destinations.  The last insn of a
7206	     LIBCALL block is not considered to be part of the block, since
7207	     its destination is the result of the block and hence should be
7208	     recorded.  */
7209
7210	  if (REG_NOTES (insn) != 0)
7211	    {
7212	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7213		libcall_insn = XEXP (p, 0);
7214	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7215		{
7216		  /* Keep libcall_insn for the last SET insn of a no-conflict
7217		     block to prevent changing the destination.  */
7218		  if (! no_conflict)
7219		    libcall_insn = 0;
7220		  else
7221		    no_conflict = -1;
7222		}
7223	      else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7224		no_conflict = 1;
7225	    }
7226
7227	  cse_insn (insn, libcall_insn);
7228
7229	  if (no_conflict == -1)
7230	    {
7231	      libcall_insn = 0;
7232	      no_conflict = 0;
7233	    }
7234
7235	  /* If we haven't already found an insn where we added a LABEL_REF,
7236	     check this one.  */
7237	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
7238	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7239			       (void *) insn))
7240	    recorded_label_ref = 1;
7241	}
7242
7243      /* If INSN is now an unconditional jump, skip to the end of our
7244	 basic block by pretending that we just did the last insn in the
7245	 basic block.  If we are jumping to the end of our block, show
7246	 that we can have one usage of TO.  */
7247
7248      if (any_uncondjump_p (insn))
7249	{
7250	  if (to == 0)
7251	    {
7252	      free (qty_table + max_reg);
7253	      return 0;
7254	    }
7255
7256	  if (JUMP_LABEL (insn) == to)
7257	    to_usage = 1;
7258
7259	  /* Maybe TO was deleted because the jump is unconditional.
7260	     If so, there is nothing left in this basic block.  */
7261	  /* ??? Perhaps it would be smarter to set TO
7262	     to whatever follows this insn,
7263	     and pretend the basic block had always ended here.  */
7264	  if (INSN_DELETED_P (to))
7265	    break;
7266
7267	  insn = PREV_INSN (to);
7268	}
7269
7270      /* See if it is ok to keep on going past the label
7271	 which used to end our basic block.  Remember that we incremented
7272	 the count of that label, so we decrement it here.  If we made
7273	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7274	 want to count the use in that jump.  */
7275
7276      if (to != 0 && NEXT_INSN (insn) == to
7277	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7278	{
7279	  struct cse_basic_block_data val;
7280	  rtx prev;
7281
7282	  insn = NEXT_INSN (to);
7283
7284	  /* If TO was the last insn in the function, we are done.  */
7285	  if (insn == 0)
7286	    {
7287	      free (qty_table + max_reg);
7288	      return 0;
7289	    }
7290
7291	  /* If TO was preceded by a BARRIER we are done with this block
7292	     because it has no continuation.  */
7293	  prev = prev_nonnote_insn (to);
7294	  if (prev && GET_CODE (prev) == BARRIER)
7295	    {
7296	      free (qty_table + max_reg);
7297	      return insn;
7298	    }
7299
7300	  /* Find the end of the following block.  Note that we won't be
7301	     following branches in this case.  */
7302	  to_usage = 0;
7303	  val.path_size = 0;
7304	  val.path = xmalloc (sizeof (struct branch_path)
7305			      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7306	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
7307	  free (val.path);
7308
7309	  /* If the tables we allocated have enough space left
7310	     to handle all the SETs in the next basic block,
7311	     continue through it.  Otherwise, return,
7312	     and that block will be scanned individually.  */
7313	  if (val.nsets * 2 + next_qty > max_qty)
7314	    break;
7315
7316	  cse_basic_block_start = val.low_cuid;
7317	  cse_basic_block_end = val.high_cuid;
7318	  to = val.last;
7319
7320	  /* Prevent TO from being deleted if it is a label.  */
7321	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7322	    ++LABEL_NUSES (to);
7323
7324	  /* Back up so we process the first insn in the extension.  */
7325	  insn = PREV_INSN (insn);
7326	}
7327    }
7328
7329  if (next_qty > max_qty)
7330    abort ();
7331
7332  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7333     the previous insn is the only insn that branches to the head of a loop,
7334     we can cse into the loop.  Don't do this if we changed the jump
7335     structure of a loop unless we aren't going to be following jumps.  */
7336
7337  insn = prev_nonnote_insn (to);
7338  if ((cse_jumps_altered == 0
7339       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7340      && around_loop && to != 0
7341      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7342      && GET_CODE (insn) == JUMP_INSN
7343      && JUMP_LABEL (insn) != 0
7344      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7345    cse_around_loop (JUMP_LABEL (insn));
7346
7347  free (qty_table + max_reg);
7348
7349  return to ? NEXT_INSN (to) : 0;
7350}
7351
7352/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7353   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7354
7355static int
7356check_for_label_ref (rtx *rtl, void *data)
7357{
7358  rtx insn = (rtx) data;
7359
7360  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7361     we must rerun jump since it needs to place the note.  If this is a
7362     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7363     since no REG_LABEL will be added.  */
7364  return (GET_CODE (*rtl) == LABEL_REF
7365	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7366	  && LABEL_P (XEXP (*rtl, 0))
7367	  && INSN_UID (XEXP (*rtl, 0)) != 0
7368	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7369}
7370
7371/* Count the number of times registers are used (not set) in X.
7372   COUNTS is an array in which we accumulate the count, INCR is how much
7373   we count each register usage.  */
7374
7375static void
7376count_reg_usage (rtx x, int *counts, int incr)
7377{
7378  enum rtx_code code;
7379  rtx note;
7380  const char *fmt;
7381  int i, j;
7382
7383  if (x == 0)
7384    return;
7385
7386  switch (code = GET_CODE (x))
7387    {
7388    case REG:
7389      counts[REGNO (x)] += incr;
7390      return;
7391
7392    case PC:
7393    case CC0:
7394    case CONST:
7395    case CONST_INT:
7396    case CONST_DOUBLE:
7397    case CONST_VECTOR:
7398    case SYMBOL_REF:
7399    case LABEL_REF:
7400      return;
7401
7402    case CLOBBER:
7403      /* If we are clobbering a MEM, mark any registers inside the address
7404         as being used.  */
7405      if (GET_CODE (XEXP (x, 0)) == MEM)
7406	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7407      return;
7408
7409    case SET:
7410      /* Unless we are setting a REG, count everything in SET_DEST.  */
7411      if (GET_CODE (SET_DEST (x)) != REG)
7412	count_reg_usage (SET_DEST (x), counts, incr);
7413      count_reg_usage (SET_SRC (x), counts, incr);
7414      return;
7415
7416    case CALL_INSN:
7417      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7418      /* Fall through.  */
7419
7420    case INSN:
7421    case JUMP_INSN:
7422      count_reg_usage (PATTERN (x), counts, incr);
7423
7424      /* Things used in a REG_EQUAL note aren't dead since loop may try to
7425	 use them.  */
7426
7427      note = find_reg_equal_equiv_note (x);
7428      if (note)
7429	{
7430	  rtx eqv = XEXP (note, 0);
7431
7432	  if (GET_CODE (eqv) == EXPR_LIST)
7433	    /* This REG_EQUAL note describes the result of a function call.
7434	       Process all the arguments.  */
7435	    do
7436	      {
7437		count_reg_usage (XEXP (eqv, 0), counts, incr);
7438		eqv = XEXP (eqv, 1);
7439	      }
7440	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
7441	  else
7442	    count_reg_usage (eqv, counts, incr);
7443	}
7444      return;
7445
7446    case EXPR_LIST:
7447      if (REG_NOTE_KIND (x) == REG_EQUAL
7448	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
7449	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7450	     involving registers in the address.  */
7451	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
7452	count_reg_usage (XEXP (x, 0), counts, incr);
7453
7454      count_reg_usage (XEXP (x, 1), counts, incr);
7455      return;
7456
7457    case ASM_OPERANDS:
7458      /* Iterate over just the inputs, not the constraints as well.  */
7459      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7460	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7461      return;
7462
7463    case INSN_LIST:
7464      abort ();
7465
7466    default:
7467      break;
7468    }
7469
7470  fmt = GET_RTX_FORMAT (code);
7471  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7472    {
7473      if (fmt[i] == 'e')
7474	count_reg_usage (XEXP (x, i), counts, incr);
7475      else if (fmt[i] == 'E')
7476	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7477	  count_reg_usage (XVECEXP (x, i, j), counts, incr);
7478    }
7479}
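
/* Worked example (register numbers are illustrative): for

       (insn ... (set (reg:SI 30) (plus:SI (reg:SI 31) (reg:SI 31))))

   count_reg_usage (insn, counts, 1) adds 2 to counts[31], one per use,
   and adds nothing for register 30, since a bare REG in SET_DEST is a
   definition rather than a use.  */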
7480
7481/* Return true if set is live.  */
7482static bool
7483set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
7484	    int *counts)
7485{
7486#ifdef HAVE_cc0
7487  rtx tem;
7488#endif
7489
7490  if (set_noop_p (set))
7491    ;
7492
7493#ifdef HAVE_cc0
7494  else if (GET_CODE (SET_DEST (set)) == CC0
7495	   && !side_effects_p (SET_SRC (set))
7496	   && ((tem = next_nonnote_insn (insn)) == 0
7497	       || !INSN_P (tem)
7498	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7499    return false;
7500#endif
7501  else if (GET_CODE (SET_DEST (set)) != REG
7502	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7503	   || counts[REGNO (SET_DEST (set))] != 0
7504	   || side_effects_p (SET_SRC (set))
7505	   /* An ADDRESSOF expression can turn into a use of the
7506	      internal arg pointer, so always consider the
7507	      internal arg pointer live.  If it is truly dead,
7508	      flow will delete the initializing insn.  */
7509	   || (SET_DEST (set) == current_function_internal_arg_pointer))
7510    return true;
7511  return false;
7512}
7513
7514/* Return true if insn is live.  */
7515
7516static bool
7517insn_live_p (rtx insn, int *counts)
7518{
7519  int i;
7520  if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7521    return true;
7522  else if (GET_CODE (PATTERN (insn)) == SET)
7523    return set_live_p (PATTERN (insn), insn, counts);
7524  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7525    {
7526      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7527	{
7528	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7529
7530	  if (GET_CODE (elt) == SET)
7531	    {
7532	      if (set_live_p (elt, insn, counts))
7533		return true;
7534	    }
7535	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7536	    return true;
7537	}
7538      return false;
7539    }
7540  else
7541    return true;
7542}
7543
7544/* Return true if libcall is dead as a whole.  */
7545
7546static bool
7547dead_libcall_p (rtx insn, int *counts)
7548{
7549  rtx note, set, new;
7550
7551  /* See if there's a REG_EQUAL note on this insn and try to
7552     replace the source with the REG_EQUAL expression.
7553
7554     We assume that insns with REG_RETVALs can only be reg->reg
7555     copies at this point.  */
7556  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7557  if (!note)
7558    return false;
7559
7560  set = single_set (insn);
7561  if (!set)
7562    return false;
7563
7564  new = simplify_rtx (XEXP (note, 0));
7565  if (!new)
7566    new = XEXP (note, 0);
7567
7568  /* While changing insn, we must update the counts accordingly.  */
7569  count_reg_usage (insn, counts, -1);
7570
7571  if (validate_change (insn, &SET_SRC (set), new, 0))
7572    {
7573      count_reg_usage (insn, counts, 1);
7574      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7575      remove_note (insn, note);
7576      return true;
7577    }
7578
7579  if (CONSTANT_P (new))
7580    {
7581      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7582      if (new && validate_change (insn, &SET_SRC (set), new, 0))
7583	{
7584	  count_reg_usage (insn, counts, 1);
7585	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7586	  remove_note (insn, note);
7587	  return true;
7588	}
7589    }
7590
7591  count_reg_usage (insn, counts, 1);
7592  return false;
7593}
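
/* Shape of the libcall block being probed (a sketch):

       (insn 1 ...)                        ; REG_LIBCALL -> insn 3
       (insn 2 ...)                        ; intermediate computation
       (insn 3 (set (reg 40) (reg 41)))    ; REG_RETVAL -> insn 1,
                                           ; REG_EQUAL (udiv (reg a) (reg b))

   If validate_change accepts the (possibly simplified) REG_EQUAL value
   as the new SET_SRC of insn 3, insns 1 and 2 no longer feed it, and
   the caller may delete them as dead.  */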
7594
7595/* Scan all the insns and delete any that are dead; i.e., they store a register
7596   that is never used or they copy a register to itself.
7597
7598   This is used to remove insns made obviously dead by cse, loop or other
7599   optimizations.  It improves the heuristics in loop since it won't try to
7600   move dead invariants out of loops or make givs for dead quantities.  The
7601   remaining passes of the compilation are also sped up.  */
7602
7603int
7604delete_trivially_dead_insns (rtx insns, int nreg)
7605{
7606  int *counts;
7607  rtx insn, prev;
7608  int in_libcall = 0, dead_libcall = 0;
7609  int ndead = 0, nlastdead, niterations = 0;
7610
7611  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7612  /* First count the number of times each register is used.  */
7613  counts = xcalloc (nreg, sizeof (int));
7614  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7615    count_reg_usage (insn, counts, 1);
7616
7617  do
7618    {
7619      nlastdead = ndead;
7620      niterations++;
7621      /* Go from the last insn to the first and delete insns that only set unused
7622	 registers or copy a register to itself.  As we delete an insn, remove
7623	 usage counts for registers it uses.
7624
7625	 The first jump optimization pass may leave a real insn as the last
7626	 insn in the function.  We must not skip that insn or we may end
7627	 up deleting code that is not really dead.  */
7628      insn = get_last_insn ();
7629      if (! INSN_P (insn))
7630	insn = prev_real_insn (insn);
7631
7632      for (; insn; insn = prev)
7633	{
7634	  int live_insn = 0;
7635
7636	  prev = prev_real_insn (insn);
7637
7638	  /* Don't delete any insns that are part of a libcall block unless
7639	     we can delete the whole libcall block.
7640
7641	     Flow or loop might get confused if we did that.  Remember
7642	     that we are scanning backwards.  */
7643	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7644	    {
7645	      in_libcall = 1;
7646	      live_insn = 1;
7647	      dead_libcall = dead_libcall_p (insn, counts);
7648	    }
7649	  else if (in_libcall)
7650	    live_insn = ! dead_libcall;
7651	  else
7652	    live_insn = insn_live_p (insn, counts);
7653
7654	  /* If this is a dead insn, delete it and show registers in it aren't
7655	     being used.  */
7656
7657	  if (! live_insn)
7658	    {
7659	      count_reg_usage (insn, counts, -1);
7660	      delete_insn_and_edges (insn);
7661	      ndead++;
7662	    }
7663
7664	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7665	    {
7666	      in_libcall = 0;
7667	      dead_libcall = 0;
7668	    }
7669	}
7670    }
7671  while (ndead != nlastdead);
7672
7673  if (rtl_dump_file && ndead)
7674    fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7675	     ndead, niterations);
7676  /* Clean up.  */
7677  free (counts);
7678  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7679  return ndead;
7680}
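
/* Typical invocation, e.g. after a CSE pass (a sketch; call sites
   vary):

       delete_trivially_dead_insns (get_insns (), max_reg_num ());

   The count of deleted insns is returned mostly so callers can decide
   whether further cleanup passes are worthwhile.  */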
7681
7682/* This function is called via for_each_rtx.  The argument, NEWREG, is
7683   a condition code register with the desired mode.  If we are looking
7684   at the same register in a different mode, replace it with
7685   NEWREG.  */
7686
7687static int
7688cse_change_cc_mode (rtx *loc, void *data)
7689{
7690  rtx newreg = (rtx) data;
7691
7692  if (*loc
7693      && GET_CODE (*loc) == REG
7694      && REGNO (*loc) == REGNO (newreg)
7695      && GET_MODE (*loc) != GET_MODE (newreg))
7696    {
7697      *loc = newreg;
7698      return -1;
7699    }
7700  return 0;
7701}
7702
7703/* Change the mode of any reference to the register REGNO (NEWREG) to
7704   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7705   any instruction which modifies NEWREG.  */
7706
7707static void
7708cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7709{
7710  rtx insn;
7711
7712  for (insn = start; insn != end; insn = NEXT_INSN (insn))
7713    {
7714      if (! INSN_P (insn))
7715	continue;
7716
7717      if (reg_set_p (newreg, insn))
7718	return;
7719
7720      for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7721      for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
7722    }
7723}
7724
7725/* BB is a basic block which finishes with CC_REG as a condition code
7726   register which is set to CC_SRC.  Look through the successors of BB
7727   to find blocks which have a single predecessor (i.e., this one),
7728   and look through those blocks for an assignment to CC_REG which is
7729   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7730   permitted to change the mode of CC_SRC to a compatible mode.  This
7731   returns VOIDmode if no equivalent assignments were found.
7732   Otherwise it returns the mode which CC_SRC should wind up with.
7733
7734   The main complexity in this function is handling the mode issues.
7735   We may have more than one duplicate which we can eliminate, and we
7736   try to find a mode which will work for multiple duplicates.  */
7737
7738static enum machine_mode
7739cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7740{
7741  bool found_equiv;
7742  enum machine_mode mode;
7743  unsigned int insn_count;
7744  edge e;
7745  rtx insns[2];
7746  enum machine_mode modes[2];
7747  rtx last_insns[2];
7748  unsigned int i;
7749  rtx newreg;
7750
7751  /* We expect to have two successors.  Look at both before picking
7752     the final mode for the comparison.  If we have more successors
7753     (i.e., some sort of table jump, although that seems unlikely),
7754     then we require all beyond the first two to use the same
7755     mode.  */
7756
7757  found_equiv = false;
7758  mode = GET_MODE (cc_src);
7759  insn_count = 0;
7760  for (e = bb->succ; e; e = e->succ_next)
7761    {
7762      rtx insn;
7763      rtx end;
7764
7765      if (e->flags & EDGE_COMPLEX)
7766	continue;
7767
7768      if (! e->dest->pred
7769	  || e->dest->pred->pred_next
7770	  || e->dest == EXIT_BLOCK_PTR)
7771	continue;
7772
7773      end = NEXT_INSN (BB_END (e->dest));
7774      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7775	{
7776	  rtx set;
7777
7778	  if (! INSN_P (insn))
7779	    continue;
7780
7781	  /* If CC_SRC is modified, we have to stop looking for
7782	     something which uses it.  */
7783	  if (modified_in_p (cc_src, insn))
7784	    break;
7785
7786	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7787	  set = single_set (insn);
7788	  if (set
7789	      && GET_CODE (SET_DEST (set)) == REG
7790	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7791	    {
7792	      bool found;
7793	      enum machine_mode set_mode;
7794	      enum machine_mode comp_mode;
7795
7796	      found = false;
7797	      set_mode = GET_MODE (SET_SRC (set));
7798	      comp_mode = set_mode;
7799	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7800		found = true;
7801	      else if (GET_CODE (cc_src) == COMPARE
7802		       && GET_CODE (SET_SRC (set)) == COMPARE
7803		       && mode != set_mode
7804		       && rtx_equal_p (XEXP (cc_src, 0),
7805				       XEXP (SET_SRC (set), 0))
7806		       && rtx_equal_p (XEXP (cc_src, 1),
7807				       XEXP (SET_SRC (set), 1)))
7808
7809		{
7810		  comp_mode = (*targetm.cc_modes_compatible) (mode, set_mode);
7811		  if (comp_mode != VOIDmode
7812		      && (can_change_mode || comp_mode == mode))
7813		    found = true;
7814		}
7815
7816	      if (found)
7817		{
7818		  found_equiv = true;
7819		  if (insn_count < ARRAY_SIZE (insns))
7820		    {
7821		      insns[insn_count] = insn;
7822		      modes[insn_count] = set_mode;
7823		      last_insns[insn_count] = end;
7824		      ++insn_count;
7825
7826		      if (mode != comp_mode)
7827			{
7828			  if (! can_change_mode)
7829			    abort ();
7830			  mode = comp_mode;
7831			  PUT_MODE (cc_src, mode);
7832			}
7833		    }
7834		  else
7835		    {
7836		      if (set_mode != mode)
7837			{
7838			  /* We found a matching expression in the
7839			     wrong mode, but we don't have room to
7840			     store it in the array.  Punt.  This case
7841			     should be rare.  */
7842			  break;
7843			}
7844		      /* INSN sets CC_REG to a value equal to CC_SRC
7845			 with the right mode.  We can simply delete
7846			 it.  */
7847		      delete_insn (insn);
7848		    }
7849
7850		  /* We found an instruction to delete.  Keep looking,
7851		     in the hopes of finding a three-way jump.  */
7852		  continue;
7853		}
7854
7855	      /* We found an instruction which sets the condition
7856		 code, so don't look any farther.  */
7857	      break;
7858	    }
7859
7860	  /* If INSN sets CC_REG in some other way, don't look any
7861	     farther.  */
7862	  if (reg_set_p (cc_reg, insn))
7863	    break;
7864	}
7865
7866      /* If we fell off the bottom of the block, we can keep looking
7867	 through successors.  We pass CAN_CHANGE_MODE as false because
7868	 we aren't prepared to handle compatibility between the
7869	 further blocks and this block.  */
7870      if (insn == end)
7871	{
7872	  enum machine_mode submode;
7873
7874	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7875	  if (submode != VOIDmode)
7876	    {
7877	      if (submode != mode)
7878		abort ();
7879	      found_equiv = true;
7880	      can_change_mode = false;
7881	    }
7882	}
7883    }
7884
7885  if (! found_equiv)
7886    return VOIDmode;
7887
7888  /* Now INSN_COUNT is the number of instructions we found which set
7889     CC_REG to a value equivalent to CC_SRC.  The instructions are in
7890     INSNS.  The modes used by those instructions are in MODES.  */
7891
7892  newreg = NULL_RTX;
7893  for (i = 0; i < insn_count; ++i)
7894    {
7895      if (modes[i] != mode)
7896	{
7897	  /* We need to change the mode of CC_REG in INSNS[i] and
7898	     subsequent instructions.  */
7899	  if (! newreg)
7900	    {
7901	      if (GET_MODE (cc_reg) == mode)
7902		newreg = cc_reg;
7903	      else
7904		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7905	    }
7906	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7907				    newreg);
7908	}
7909
7910      delete_insn (insns[i]);
7911    }
7912
7913  return mode;
7914}
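
/* Schematic of the redundancy removed (modes and registers are
   illustrative):

       bb A:  (set (reg:CCZ flags) (compare:CCZ (reg x) (const_int 0)))
              conditional jump on flags
       bb B (sole predecessor A):
              (set (reg:CC flags) (compare:CC (reg x) (const_int 0)))

   The compare in bb B repeats the one in bb A, so it is deleted; and
   since its mode differs, the cc_modes_compatible target hook picks a
   mode that satisfies both users, with cse_change_cc_mode_insns
   rewriting later references.  */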
7915
7916/* If we have a fixed condition code register (or two), walk through
7917   the instructions and try to eliminate duplicate assignments.  */
7918
7919void
7920cse_condition_code_reg (void)
7921{
7922  unsigned int cc_regno_1;
7923  unsigned int cc_regno_2;
7924  rtx cc_reg_1;
7925  rtx cc_reg_2;
7926  basic_block bb;
7927
7928  if (! (*targetm.fixed_condition_code_regs) (&cc_regno_1, &cc_regno_2))
7929    return;
7930
7931  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7932  if (cc_regno_2 != INVALID_REGNUM)
7933    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7934  else
7935    cc_reg_2 = NULL_RTX;
7936
7937  FOR_EACH_BB (bb)
7938    {
7939      rtx last_insn;
7940      rtx cc_reg;
7941      rtx insn;
7942      rtx cc_src_insn;
7943      rtx cc_src;
7944      enum machine_mode mode;
7945      enum machine_mode orig_mode;
7946
7947      /* Look for blocks which end with a conditional jump based on a
7948	 condition code register.  Then look for the instruction which
7949	 sets the condition code register.  Then look through the
7950	 successor blocks for instructions which set the condition
7951	 code register to the same value.  There are other possible
7952	 uses of the condition code register, but these are by far the
7953	 most common and the ones which we are most likely to be able
7954	 to optimize.  */
7955
7956      last_insn = BB_END (bb);
7957      if (GET_CODE (last_insn) != JUMP_INSN)
7958	continue;
7959
7960      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7961	cc_reg = cc_reg_1;
7962      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7963	cc_reg = cc_reg_2;
7964      else
7965	continue;
7966
7967      cc_src_insn = NULL_RTX;
7968      cc_src = NULL_RTX;
7969      for (insn = PREV_INSN (last_insn);
7970	   insn && insn != PREV_INSN (BB_HEAD (bb));
7971	   insn = PREV_INSN (insn))
7972	{
7973	  rtx set;
7974
7975	  if (! INSN_P (insn))
7976	    continue;
7977	  set = single_set (insn);
7978	  if (set
7979	      && GET_CODE (SET_DEST (set)) == REG
7980	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7981	    {
7982	      cc_src_insn = insn;
7983	      cc_src = SET_SRC (set);
7984	      break;
7985	    }
7986	  else if (reg_set_p (cc_reg, insn))
7987	    break;
7988	}
7989
7990      if (! cc_src_insn)
7991	continue;
7992
7993      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7994	continue;
7995
7996      /* Now CC_REG is a condition code register used for a
7997	 conditional jump at the end of the block, and CC_SRC, in
7998	 CC_SRC_INSN, is the value to which that condition code
7999	 register is set, and CC_SRC is still meaningful at the end of
8000	 the basic block.  */
8001
8002      orig_mode = GET_MODE (cc_src);
8003      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
8004      if (mode != VOIDmode)
8005	{
8006	  if (mode != GET_MODE (cc_src))
8007	    abort ();
8008	  if (mode != orig_mode)
8009	    {
8010	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
8011
8012	      /* Change the mode of CC_REG in CC_SRC_INSN to
8013		 GET_MODE (NEWREG).  */
8014	      for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
8015			    newreg);
8016	      for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
8017			    newreg);
8018
8019	      /* Do the same in the following insns that use the
8020		 current value of CC_REG within BB.  */
8021	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
8022					NEXT_INSN (last_insn),
8023					newreg);
8024	    }
8025	}
8026    }
8027}
8028