/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the qty_table `mode' must be in the hash table for both registers
   and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the mode of the quantity
   for one of the registers differs from the mode of those expressions.

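   To make the scheme above concrete, here is an illustrative sketch
   (this example is not in the original sources).  Suppose `max_reg'
   is 100 and we scan:

	(set (reg 101) (mem ...))	;; reg 101 gets new quantity 100
	(set (reg 102) (reg 101))	;; copy: REG_QTY (102) becomes 100
	(set (reg 101) (const_int 5))	;; reg 101 gets fresh quantity 101

   After the third insn, quantity 100 survives only in reg 102, so a
   later use of the value loaded from memory is replaced by reg 102,
   while reg 101 now names the new quantity whose constant value is 5.
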
Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

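   For example (an illustrative sketch, not from the original sources),
   after scanning

	(set (reg:SI 103) (const_int 8))
	(set (reg:HI 104) (const_int 8))

   the hash table holds two distinct entries for (const_int 8), one
   recorded with mode SImode and one with HImode, even though the RTL
   constant itself is modeless and shared.
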
Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.

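   Concretely (an illustrative sketch, not from the original sources):
   suppose (plus (reg 105) (const_int 4)) is entered while
   reg_tick[105] == 2, so reg_in_table[105] is set to 2.  A later store
   into reg 105 bumps reg_tick[105] to 3; the stale PLUS entry is then
   lazily ignored because 2 != 3, and it is physically removed only
   when some new expression mentioning reg 105 wants to enter the table.
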
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
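
/* For instance (an illustrative sketch, not from the original sources),
   when (plus (reg 106) (const_int 12)) is entered, the related base
   expression (reg 106) is entered as well and the two are linked
   through `related_value'.  If a register is later wanted that is
   equivalent to (plus (reg 106) (const_int 8)), it can be found by
   walking the circular related-value chain from the base expression
   and adjusting by the difference of the constant terms.  */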

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)

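/* A worked example of the hash function (illustrative only): for
   register number 130, REGHASH_FN yields (130 ^ (130 >> 7)) & 127
   = (130 ^ 1) & 127 = 3, so that register's cse_reg_info entry lives
   on the chain headed by reg_hash[3].  XORing in the high bits keeps
   register numbers that are 128 apart from colliding systematically
   in the 128-entry table.  */
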
/* The last lookup we did into the cse_reg_info hash table.  This
   allows us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
   REG_LABEL; if so, we have to rerun jump after CSE to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   The elements of each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

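/* Example (illustrative only): a pseudo such as (reg:SI 107) is
   hashed from its quantity number, ((unsigned) REG << 7)
   + REG_QTY (107), masked down to the 32 buckets, so registers that
   currently share a quantity hash identically without a call to
   canon_hash.  Everything else, hard registers included, goes through
   canon_hash, which may also set `do_not_record'.  */
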
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) 			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || ((X) == arg_pointer_rtx				\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || ((X) == arg_pointer_rtx				\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost		PARAMS ((rtx, enum rtx_code));
static int approx_reg_cost_1	PARAMS ((rtx *, void *));
static int approx_reg_cost	PARAMS ((rtx));
static int preferrable		PARAMS ((int, int, int, int));
static void new_basic_block	PARAMS ((void));
static void make_new_qty	PARAMS ((unsigned int, enum machine_mode));
static void make_regs_eqv	PARAMS ((unsigned int, unsigned int));
static void delete_reg_equiv	PARAMS ((unsigned int));
static int mention_regs		PARAMS ((rtx));
static int insert_regs		PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table	PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup	PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate		PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p	PARAMS ((rtx, int));
static void remove_invalid_refs	PARAMS ((unsigned int));
static void remove_invalid_subreg_refs	PARAMS ((unsigned int, unsigned int,
						 enum machine_mode));
static void rehash_using_reg	PARAMS ((rtx));
static void invalidate_memory	PARAMS ((void));
static void invalidate_for_call	PARAMS ((void));
static rtx use_related_value	PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash	PARAMS ((rtx, enum machine_mode));
static unsigned canon_hash_string PARAMS ((const char *));
static unsigned safe_hash	PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p		PARAMS ((rtx, rtx, int, int));
static rtx canon_reg		PARAMS ((rtx, rtx));
static void find_best_addr	PARAMS ((rtx, rtx *, enum machine_mode));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx		PARAMS ((rtx, rtx));
static rtx equiv_constant	PARAMS ((rtx));
static void record_jump_equiv	PARAMS ((rtx, int));
static void record_jump_cond	PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx, int));
static void cse_insn		PARAMS ((rtx, rtx));
static int addr_affects_sp_p	PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes	PARAMS ((rtx, rtx));
static void cse_around_loop	PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop	PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block	PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PARAMS ((rtx, int *, rtx, int));
static int check_for_label_ref	PARAMS ((rtx *, void *));
extern void dump_class          PARAMS ((struct table_elt*));
static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
static int check_dependence	PARAMS ((rtx *, void *));

static void flush_hash_table	PARAMS ((void));
static bool insn_live_p		PARAMS ((rtx, int *));
static bool set_live_p		PARAMS ((rtx, rtx, int *));
static bool dead_libcall_p	PARAMS ((rtx));

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (xp, data)
     rtx *xp;
     void *data;
{
  rtx x = *xp;
  regset set = (regset) data;

  if (x && GET_CODE (x) == REG)
    SET_REGNO_REG_SET (set, REGNO (x));
  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (x)
     rtx x;
{
  regset_head set;
  int i;
  int cost = 0;
  int hardregs = 0;

  INIT_REG_SET (&set);
  for_each_rtx (&x, approx_reg_cost_1, (void *)&set);

  EXECUTE_IF_SET_IN_REG_SET
    (&set, 0, i,
     {
       if (! CHEAP_REGNO (i))
	 {
	   if (i < FIRST_PSEUDO_REGISTER)
	     hardregs++;

	   cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
	 }
     });

  CLEAR_REG_SET (&set);
  return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
}

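/* For instance (illustrative only): an rtx mentioning pseudos 108 and
   109 plus the frame pointer gets cost 1 + 1 + 0 = 2, since each
   non-cheap pseudo counts 1 and the frame pointer is screened out by
   CHEAP_REGNO.  A non-fixed hard register would count 2 and, with
   SMALL_REGISTER_CLASSES, force the result to MAX_COST.  */
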
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferrable (cost_a, regcost_a, cost_b, regcost_b)
     int cost_a, regcost_a, cost_b, regcost_b;
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

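/* Usage sketch (illustrative only): preferrable (4, 1, 4, 2) returns
   -1, preferring A on register pressure because the operation costs
   tie, while preferrable (4, MAX_COST, 8, 0) returns 1, rejecting A
   outright however cheap its operation cost looks.  */
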
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x, outer)
     rtx x;
     enum rtx_code outer;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

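/* For example (illustrative only): on a target where the narrowing is
   a no-op truncation, (subreg:QI (reg:SI 110) 0) costs 0 here, being
   a lowpart SUBREG of an integer register that needs no code.  Any
   SUBREG failing one of the tests falls through to twice its
   rtx_cost.  */
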
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

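/* A worked example (illustrative only, ignoring any RTX_COSTS,
   CONST_COSTS, or DEFAULT_RTX_COSTS overrides a target may provide):
   for (mult:SI (reg:SI 111) (const_int 4)), the power-of-two
   multiplier makes the operation count as a shift, so total = 2; the
   operands then add 0 for the REG and COSTS_N_INSNS (1) for the
   CONST_INT default, giving 2 + COSTS_N_INSNS (1) overall.  */
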
/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (x, mode)
     rtx x;
     enum machine_mode mode;
{
  /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate writing
     of ADDRESS_COST macro by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;
#ifdef ADDRESS_COST
  return ADDRESS_COST (x);
#else
  return rtx_cost (x, MEM);
#endif
}


static struct cse_reg_info *
get_cse_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset ((char *) reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     unsigned int reg;
     enum machine_mode mode;
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     unsigned int new, old;
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

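/* An illustrative sketch (not from the original sources): if the
   chain for quantity q is  frame pointer -> pseudo 112  and
   make_regs_eqv adds pseudo 113 whose lifespan extends past the
   current block, 113 still does not become the head: a fixed hard
   register such as the frame pointer always stays the canonical first
   register, so 113 is linked in toward the tail instead.  */
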
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     unsigned int reg;
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */
1418
1419  if (elt->related_value != 0 && elt->related_value != elt)
1420    {
1421      struct table_elt *p = elt->related_value;
1422
1423      while (p->related_value != elt)
1424	p = p->related_value;
1425      p->related_value = elt->related_value;
1426      if (p->related_value == p)
1427	p->related_value = 0;
1428    }
1429
1430  /* Now add it to the free element chain.  */
1431  elt->next_same_hash = free_element_chain;
1432  free_element_chain = elt;
1433}
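
/* Usage sketch (illustrative; REG_X stands for an assumed pseudo-register
   rtx, and nothing here is called by the pass itself).  A register can sit
   in the table under several modes, so callers such as invalidate loop
   with lookup_for_remove until every entry for it is gone.  */
#if 0
{
  unsigned h = HASH (reg_x, GET_MODE (reg_x));
  struct table_elt *e;

  while ((e = lookup_for_remove (reg_x, h, GET_MODE (reg_x))) != 0)
    remove_from_table (e, h);
}
#endif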
1434
1435/* Look up X in the hash table and return its table element,
1436   or 0 if X is not in the table.
1437
1438   MODE is the machine-mode of X, or if X is an integer constant
1439   with VOIDmode then MODE is the mode with which X will be used.
1440
1441   Here we are satisfied to find an expression whose tree structure
1442   looks like X.  */
1443
1444static struct table_elt *
1445lookup (x, hash, mode)
1446     rtx x;
1447     unsigned hash;
1448     enum machine_mode mode;
1449{
1450  struct table_elt *p;
1451
1452  for (p = table[hash]; p; p = p->next_same_hash)
1453    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1454			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1455      return p;
1456
1457  return 0;
1458}
1459
1460/* Like `lookup' but don't care whether the table element uses invalid regs.
1461   Also ignore discrepancies in the machine mode of a register.  */
1462
1463static struct table_elt *
1464lookup_for_remove (x, hash, mode)
1465     rtx x;
1466     unsigned hash;
1467     enum machine_mode mode;
1468{
1469  struct table_elt *p;
1470
1471  if (GET_CODE (x) == REG)
1472    {
1473      unsigned int regno = REGNO (x);
1474
1475      /* Don't check the machine mode when comparing registers;
1476	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1477      for (p = table[hash]; p; p = p->next_same_hash)
1478	if (GET_CODE (p->exp) == REG
1479	    && REGNO (p->exp) == regno)
1480	  return p;
1481    }
1482  else
1483    {
1484      for (p = table[hash]; p; p = p->next_same_hash)
1485	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1486	  return p;
1487    }
1488
1489  return 0;
1490}
1491
1492/* Look for an expression equivalent to X and with code CODE.
1493   If one is found, return that expression.  */
1494
1495static rtx
1496lookup_as_function (x, code)
1497     rtx x;
1498     enum rtx_code code;
1499{
1500  struct table_elt *p
1501    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1502
1503  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1504     long as we are narrowing.  So if we looked in vain for a mode narrower
1505     than word_mode before, look for word_mode now.  */
1506  if (p == 0 && code == CONST_INT
1507      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1508    {
1509      x = copy_rtx (x);
1510      PUT_MODE (x, word_mode);
1511      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1512    }
1513
1514  if (p == 0)
1515    return 0;
1516
1517  for (p = p->first_same_value; p; p = p->next_same_value)
1518    if (GET_CODE (p->exp) == code
1519	/* Make sure this is a valid entry in the table.  */
1520	&& exp_equiv_p (p->exp, p->exp, 1, 0))
1521      return p->exp;
1522
1523  return 0;
1524}
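
/* Usage sketch (illustrative; ADDR and OFFSET are assumed locals): asking
   whether the value of ADDR is known to be computed by a PLUS, the way
   address folding can use this function.  */
#if 0
{
  rtx y = lookup_as_function (addr, PLUS);

  if (y != 0 && GET_CODE (XEXP (y, 1)) == CONST_INT)
    offset = INTVAL (XEXP (y, 1));
}
#endif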
1525
1526/* Insert X in the hash table, assuming HASH is its hash code
1527   and CLASSP is an element of the class it should go in
1528   (or 0 if a new class should be made).
1529   It is inserted at the proper position to keep the class in
1530   the order cheapest first.
1531
1532   MODE is the machine-mode of X, or if X is an integer constant
1533   with VOIDmode then MODE is the mode with which X will be used.
1534
1535   For elements of equal cheapness, the most recent one
1536   goes in front, except that the first element in the list
1537   remains first unless a cheaper element is added.  The order of
1538   pseudo-registers does not matter, as canon_reg will be called to
1539   find the cheapest when a register is retrieved from the table.
1540
1541   The in_memory field in the hash table element is set to 0.
1542   The caller must set it nonzero if appropriate.
1543
1544   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1545   and if insert_regs returns a nonzero value
1546   you must then recompute its hash code before calling here.
1547
1548   If necessary, update table showing constant values of quantities.  */
1549
1550#define CHEAPER(X, Y) \
1551 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
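
/* For example, an element for (const_int 4) is CHEAPER than one for a
   pseudo register holding that value, so the constant ends up at the head
   of their shared equivalence class.  */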
1552
1553static struct table_elt *
1554insert (x, classp, hash, mode)
1555     rtx x;
1556     struct table_elt *classp;
1557     unsigned hash;
1558     enum machine_mode mode;
1559{
1560  struct table_elt *elt;
1561
1562  /* If X is a register and we haven't made a quantity for it,
1563     something is wrong.  */
1564  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1565    abort ();
1566
1567  /* If X is a hard register, show it is being put in the table.  */
1568  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1569    {
1570      unsigned int regno = REGNO (x);
1571      unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1572      unsigned int i;
1573
1574      for (i = regno; i < endregno; i++)
1575	SET_HARD_REG_BIT (hard_regs_in_table, i);
1576    }
1577
1578  /* Put an element for X into the right hash bucket.  */
1579
1580  elt = free_element_chain;
1581  if (elt)
1582    free_element_chain = elt->next_same_hash;
1583  else
1584    {
1585      n_elements_made++;
1586      elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1587    }
1588
1589  elt->exp = x;
1590  elt->canon_exp = NULL_RTX;
1591  elt->cost = COST (x);
1592  elt->regcost = approx_reg_cost (x);
1593  elt->next_same_value = 0;
1594  elt->prev_same_value = 0;
1595  elt->next_same_hash = table[hash];
1596  elt->prev_same_hash = 0;
1597  elt->related_value = 0;
1598  elt->in_memory = 0;
1599  elt->mode = mode;
1600  elt->is_const = (CONSTANT_P (x)
1601		   /* GNU C++ takes advantage of this for `this'
1602		      (and other const values).  */
1603		   || (RTX_UNCHANGING_P (x)
1604		       && GET_CODE (x) == REG
1605		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1606		   || FIXED_BASE_PLUS_P (x));
1607
1608  if (table[hash])
1609    table[hash]->prev_same_hash = elt;
1610  table[hash] = elt;
1611
1612  /* Put it into the proper value-class.  */
1613  if (classp)
1614    {
1615      classp = classp->first_same_value;
1616      if (CHEAPER (elt, classp))
1617	/* Insert at the head of the class.  */
1618	{
1619	  struct table_elt *p;
1620	  elt->next_same_value = classp;
1621	  classp->prev_same_value = elt;
1622	  elt->first_same_value = elt;
1623
1624	  for (p = classp; p; p = p->next_same_value)
1625	    p->first_same_value = elt;
1626	}
1627      else
1628	{
1629	  /* Insert not at head of the class.  */
1630	  /* Put it after the last element cheaper than X.  */
1631	  struct table_elt *p, *next;
1632
1633	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1634	       p = next);
1635
1636	  /* Put it after P and before NEXT.  */
1637	  elt->next_same_value = next;
1638	  if (next)
1639	    next->prev_same_value = elt;
1640
1641	  elt->prev_same_value = p;
1642	  p->next_same_value = elt;
1643	  elt->first_same_value = classp;
1644	}
1645    }
1646  else
1647    elt->first_same_value = elt;
1648
1649  /* If this is a constant being set equivalent to a register or a register
1650     being set equivalent to a constant, note the constant equivalence.
1651
1652     If this is a constant, it cannot be equivalent to a different constant,
1653     and a constant is the only thing that can be cheaper than a register.  So
1654     we know the register is the head of the class (before the constant was
1655     inserted).
1656
1657     If this is a register that is not already known equivalent to a
1658     constant, we must check the entire class.
1659
1660     If this is a register that is already known equivalent to an insn,
1661     update the qtys `const_insn' to show that `this_insn' is the latest
1662     insn making that quantity equivalent to the constant.  */
1663
1664  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1665      && GET_CODE (x) != REG)
1666    {
1667      int exp_q = REG_QTY (REGNO (classp->exp));
1668      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1669
1670      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1671      exp_ent->const_insn = this_insn;
1672    }
1673
1674  else if (GET_CODE (x) == REG
1675	   && classp
1676	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1677	   && ! elt->is_const)
1678    {
1679      struct table_elt *p;
1680
1681      for (p = classp; p != 0; p = p->next_same_value)
1682	{
1683	  if (p->is_const && GET_CODE (p->exp) != REG)
1684	    {
1685	      int x_q = REG_QTY (REGNO (x));
1686	      struct qty_table_elem *x_ent = &qty_table[x_q];
1687
1688	      x_ent->const_rtx
1689		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
1690	      x_ent->const_insn = this_insn;
1691	      break;
1692	    }
1693	}
1694    }
1695
1696  else if (GET_CODE (x) == REG
1697	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1698	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1699    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1700
1701  /* If this is a constant with symbolic value,
1702     and it has a term with an explicit integer value,
1703     link it up with related expressions.  */
1704  if (GET_CODE (x) == CONST)
1705    {
1706      rtx subexp = get_related_value (x);
1707      unsigned subhash;
1708      struct table_elt *subelt, *subelt_prev;
1709
1710      if (subexp != 0)
1711	{
1712	  /* Get the integer-free subexpression in the hash table.  */
1713	  subhash = safe_hash (subexp, mode) & HASH_MASK;
1714	  subelt = lookup (subexp, subhash, mode);
1715	  if (subelt == 0)
1716	    subelt = insert (subexp, NULL, subhash, mode);
1717	  /* Initialize SUBELT's circular chain if it has none.  */
1718	  if (subelt->related_value == 0)
1719	    subelt->related_value = subelt;
1720	  /* Find the element in the circular chain that precedes SUBELT.  */
1721	  subelt_prev = subelt;
1722	  while (subelt_prev->related_value != subelt)
1723	    subelt_prev = subelt_prev->related_value;
1724	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1725	     This way the element that follows SUBELT is the oldest one.  */
1726	  elt->related_value = subelt_prev->related_value;
1727	  subelt_prev->related_value = elt;
1728	}
1729    }
1730
1731  return elt;
1732}
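
/* Usage sketch (illustrative; SRC, SRC_ELT and MODE are assumed locals):
   the calling protocol documented above, which merge_equiv_classes below
   also follows: call insert_regs first, rehash if any quantity numbers
   changed, then insert.  */
#if 0
{
  unsigned h = HASH (src, mode);

  if (insert_regs (src, src_elt, 0))
    {
      rehash_using_reg (src);
      h = HASH (src, mode);
    }
  insert (src, src_elt, h, mode);
}
#endif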
1733
1734/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1735   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1736   the two classes equivalent.
1737
1738   CLASS1 will be the surviving class; CLASS2 should not be used after this
1739   call.
1740
1741   Any invalid entries in CLASS2 will not be copied.  */
1742
1743static void
1744merge_equiv_classes (class1, class2)
1745     struct table_elt *class1, *class2;
1746{
1747  struct table_elt *elt, *next, *new;
1748
1749  /* Ensure we start with the head of the classes.  */
1750  class1 = class1->first_same_value;
1751  class2 = class2->first_same_value;
1752
1753  /* If they were already equal, forget it.  */
1754  if (class1 == class2)
1755    return;
1756
1757  for (elt = class2; elt; elt = next)
1758    {
1759      unsigned int hash;
1760      rtx exp = elt->exp;
1761      enum machine_mode mode = elt->mode;
1762
1763      next = elt->next_same_value;
1764
1765      /* Remove old entry, make a new one in CLASS1's class.
1766	 Don't do this for invalid entries as we cannot find their
1767	 hash code (it also isn't necessary).  */
1768      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1769	{
1770	  hash_arg_in_memory = 0;
1771	  hash = HASH (exp, mode);
1772
1773	  if (GET_CODE (exp) == REG)
1774	    delete_reg_equiv (REGNO (exp));
1775
1776	  remove_from_table (elt, hash);
1777
1778	  if (insert_regs (exp, class1, 0))
1779	    {
1780	      rehash_using_reg (exp);
1781	      hash = HASH (exp, mode);
1782	    }
1783	  new = insert (exp, class1, hash, mode);
1784	  new->in_memory = hash_arg_in_memory;
1785	}
1786    }
1787}
1788
1789/* Flush the entire hash table.  */
1790
1791static void
1792flush_hash_table ()
1793{
1794  int i;
1795  struct table_elt *p;
1796
1797  for (i = 0; i < HASH_SIZE; i++)
1798    for (p = table[i]; p; p = table[i])
1799      {
1800	/* Note that invalidate can remove elements
1801	   after P in the current hash chain.  */
1802	if (GET_CODE (p->exp) == REG)
1803	  invalidate (p->exp, p->mode);
1804	else
1805	  remove_from_table (p, i);
1806      }
1807}
1808
1809/* Function called for each rtx to check whether a true dependence exists.  */
1810struct check_dependence_data
1811{
1812  enum machine_mode mode;
1813  rtx exp;
1814};
1815
1816static int
1817check_dependence (x, data)
1818     rtx *x;
1819     void *data;
1820{
1821  struct check_dependence_data *d = (struct check_dependence_data *) data;
1822  if (*x && GET_CODE (*x) == MEM)
1823    return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1824  else
1825    return 0;
1826}
1827
1828/* Remove from the hash table, or mark as invalid, all expressions whose
1829   values could be altered by storing in X.  X is a register, a subreg, or
1830   a memory reference with nonvarying address (because, when a memory
1831   reference with a varying address is stored in, all memory references are
1832   removed by invalidate_memory so specific invalidation is superfluous).
1833   FULL_MODE, if not VOIDmode, indicates that this much should be
1834   invalidated instead of just the amount indicated by the mode of X.  This
1835   is only used for bitfield stores into memory.
1836
1837   A nonvarying address may be just a register or just a symbol reference,
1838   or it may be either of those plus a numeric offset.  */
1839
1840static void
1841invalidate (x, full_mode)
1842     rtx x;
1843     enum machine_mode full_mode;
1844{
1845  int i;
1846  struct table_elt *p;
1847
1848  switch (GET_CODE (x))
1849    {
1850    case REG:
1851      {
1852	/* If X is a register, dependencies on its contents are recorded
1853	   through the qty number mechanism.  Just change the qty number of
1854	   the register, mark it as invalid for expressions that refer to it,
1855	   and remove it itself.  */
1856	unsigned int regno = REGNO (x);
1857	unsigned int hash = HASH (x, GET_MODE (x));
1858
1859	/* Remove REGNO from any quantity list it might be on and indicate
1860	   that its value might have changed.  If it is a pseudo, remove its
1861	   entry from the hash table.
1862
1863	   For a hard register, we do the first two actions above for any
1864	   additional hard registers corresponding to X.  Then, if any of these
1865	   registers are in the table, we must remove any REG entries that
1866	   overlap these registers.  */
1867
1868	delete_reg_equiv (regno);
1869	REG_TICK (regno)++;
1870
1871	if (regno >= FIRST_PSEUDO_REGISTER)
1872	  {
1873	    /* Because a register can be referenced in more than one mode,
1874	       we might have to remove more than one table entry.  */
1875	    struct table_elt *elt;
1876
1877	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1878	      remove_from_table (elt, hash);
1879	  }
1880	else
1881	  {
1882	    HOST_WIDE_INT in_table
1883	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1884	    unsigned int endregno
1885	      = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1886	    unsigned int tregno, tendregno, rn;
1887	    struct table_elt *p, *next;
1888
1889	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1890
1891	    for (rn = regno + 1; rn < endregno; rn++)
1892	      {
1893		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1894		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1895		delete_reg_equiv (rn);
1896		REG_TICK (rn)++;
1897	      }
1898
1899	    if (in_table)
1900	      for (hash = 0; hash < HASH_SIZE; hash++)
1901		for (p = table[hash]; p; p = next)
1902		  {
1903		    next = p->next_same_hash;
1904
1905		    if (GET_CODE (p->exp) != REG
1906			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1907		      continue;
1908
1909		    tregno = REGNO (p->exp);
1910		    tendregno
1911		      = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1912		    if (tendregno > regno && tregno < endregno)
1913		      remove_from_table (p, hash);
1914		  }
1915	  }
1916      }
1917      return;
1918
1919    case SUBREG:
1920      invalidate (SUBREG_REG (x), VOIDmode);
1921      return;
1922
1923    case PARALLEL:
1924      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1925	invalidate (XVECEXP (x, 0, i), VOIDmode);
1926      return;
1927
1928    case EXPR_LIST:
1929      /* This is part of a disjoint return value; extract the location in
1930	 question ignoring the offset.  */
1931      invalidate (XEXP (x, 0), VOIDmode);
1932      return;
1933
1934    case MEM:
1935      /* Calculate the canonical version of X here so that
1936	 true_dependence doesn't generate new RTL for X on each call.  */
1937      x = canon_rtx (x);
1938
1939      /* Remove all hash table elements that refer to overlapping pieces of
1940	 memory.  */
1941      if (full_mode == VOIDmode)
1942	full_mode = GET_MODE (x);
1943
1944      for (i = 0; i < HASH_SIZE; i++)
1945	{
1946	  struct table_elt *next;
1947
1948	  for (p = table[i]; p; p = next)
1949	    {
1950	      next = p->next_same_hash;
1951	      if (p->in_memory)
1952		{
1953		  struct check_dependence_data d;
1954
1955		  /* Just canonicalize the expression once;
1956		     otherwise each time we call invalidate
1957		     true_dependence will canonicalize the
1958		     expression again.  */
1959		  if (!p->canon_exp)
1960		    p->canon_exp = canon_rtx (p->exp);
1961		  d.exp = x;
1962		  d.mode = full_mode;
1963		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1964		    remove_from_table (p, i);
1965		}
1966	    }
1967	}
1968      return;
1969
1970    default:
1971      abort ();
1972    }
1973}
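
/* Usage sketch (illustrative; SET is an assumed rtx of code SET):
   invalidating the destination of a store while scanning an insn, the
   typical way this function is reached.  */
#if 0
{
  rtx dest = SET_DEST (set);

  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
      || GET_CODE (dest) == MEM)
    invalidate (dest, VOIDmode);
}
#endif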
1974
1975/* Remove all expressions that refer to register REGNO,
1976   since they are already invalid, and we are about to
1977   mark that register valid again and don't want the old
1978   expressions to reappear as valid.  */
1979
1980static void
1981remove_invalid_refs (regno)
1982     unsigned int regno;
1983{
1984  unsigned int i;
1985  struct table_elt *p, *next;
1986
1987  for (i = 0; i < HASH_SIZE; i++)
1988    for (p = table[i]; p; p = next)
1989      {
1990	next = p->next_same_hash;
1991	if (GET_CODE (p->exp) != REG
1992	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
1993	  remove_from_table (p, i);
1994      }
1995}
1996
1997/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1998   and mode MODE.  */
1999static void
2000remove_invalid_subreg_refs (regno, offset, mode)
2001     unsigned int regno;
2002     unsigned int offset;
2003     enum machine_mode mode;
2004{
2005  unsigned int i;
2006  struct table_elt *p, *next;
2007  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2008
2009  for (i = 0; i < HASH_SIZE; i++)
2010    for (p = table[i]; p; p = next)
2011      {
2012	rtx exp = p->exp;
2013	next = p->next_same_hash;
2014
2015	if (GET_CODE (exp) != REG
2016	    && (GET_CODE (exp) != SUBREG
2017		|| GET_CODE (SUBREG_REG (exp)) != REG
2018		|| REGNO (SUBREG_REG (exp)) != regno
2019		|| (((SUBREG_BYTE (exp)
2020		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2021		    && SUBREG_BYTE (exp) <= end))
2022	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
2023	  remove_from_table (p, i);
2024      }
2025}
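
/* Worked example (illustrative; assumes 4-byte SImode and 2-byte HImode):
   after a store to (subreg:SI (reg:DI 100) 4), OFFSET is 4 and END is 7.
   An entry mentioning (subreg:HI (reg:DI 100) 6), which covers bytes 6..7,
   overlaps and is removed, while one mentioning (subreg:HI (reg:DI 100) 0),
   covering bytes 0..1, survives.  */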
2026
2027/* Recompute the hash codes of any valid entries in the hash table that
2028   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2029
2030   This is called when we make a jump equivalence.  */
2031
2032static void
2033rehash_using_reg (x)
2034     rtx x;
2035{
2036  unsigned int i;
2037  struct table_elt *p, *next;
2038  unsigned hash;
2039
2040  if (GET_CODE (x) == SUBREG)
2041    x = SUBREG_REG (x);
2042
2043  /* If X is not a register or if the register is known not to be in any
2044     valid entries in the table, we have no work to do.  */
2045
2046  if (GET_CODE (x) != REG
2047      || REG_IN_TABLE (REGNO (x)) < 0
2048      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2049    return;
2050
2051  /* Scan all hash chains looking for valid entries that mention X.
2052     If we find one and it is in the wrong hash chain, move it.  We can skip
2053     objects that are registers, since they are handled specially.  */
2054
2055  for (i = 0; i < HASH_SIZE; i++)
2056    for (p = table[i]; p; p = next)
2057      {
2058	next = p->next_same_hash;
2059	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2060	    && exp_equiv_p (p->exp, p->exp, 1, 0)
2061	    && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2062	  {
2063	    if (p->next_same_hash)
2064	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2065
2066	    if (p->prev_same_hash)
2067	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2068	    else
2069	      table[i] = p->next_same_hash;
2070
2071	    p->next_same_hash = table[hash];
2072	    p->prev_same_hash = 0;
2073	    if (table[hash])
2074	      table[hash]->prev_same_hash = p;
2075	    table[hash] = p;
2076	  }
2077      }
2078}
2079
2080/* Remove from the hash table any expression that is a call-clobbered
2081   register.  Also update their TICK values.  */
2082
2083static void
2084invalidate_for_call ()
2085{
2086  unsigned int regno, endregno;
2087  unsigned int i;
2088  unsigned hash;
2089  struct table_elt *p, *next;
2090  int in_table = 0;
2091
2092  /* Go through all the hard registers.  For each that is clobbered in
2093     a CALL_INSN, remove the register from quantity chains and update
2094     reg_tick if defined.  Also see if any of these registers is currently
2095     in the table.  */
2096
2097  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2098    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2099      {
2100	delete_reg_equiv (regno);
2101	if (REG_TICK (regno) >= 0)
2102	  REG_TICK (regno)++;
2103
2104	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2105      }
2106
2107  /* In the case where we have no call-clobbered hard registers in the
2108     table, we are done.  Otherwise, scan the table and remove any
2109     entry that overlaps a call-clobbered register.  */
2110
2111  if (in_table)
2112    for (hash = 0; hash < HASH_SIZE; hash++)
2113      for (p = table[hash]; p; p = next)
2114	{
2115	  next = p->next_same_hash;
2116
2117	  if (GET_CODE (p->exp) != REG
2118	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2119	    continue;
2120
2121	  regno = REGNO (p->exp);
2122	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2123
2124	  for (i = regno; i < endregno; i++)
2125	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2126	      {
2127		remove_from_table (p, hash);
2128		break;
2129	      }
2130	}
2131}
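
/* Usage sketch (illustrative; INSN is the insn being scanned): the typical
   call site pattern, flushing call-clobbered state when a call is seen.  */
#if 0
if (GET_CODE (insn) == CALL_INSN)
  invalidate_for_call ();
#endif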
2132
2133/* Given an expression X of type CONST,
2134   and ELT which is its table entry (or 0 if it
2135   is not in the hash table),
2136   return an alternate expression for X as a register plus integer.
2137   If none can be found, return 0.  */
2138
2139static rtx
2140use_related_value (x, elt)
2141     rtx x;
2142     struct table_elt *elt;
2143{
2144  struct table_elt *relt = 0;
2145  struct table_elt *p, *q;
2146  HOST_WIDE_INT offset;
2147
2148  /* First, is there anything related known?
2149     If we have a table element, we can tell from that.
2150     Otherwise, must look it up.  */
2151
2152  if (elt != 0 && elt->related_value != 0)
2153    relt = elt;
2154  else if (elt == 0 && GET_CODE (x) == CONST)
2155    {
2156      rtx subexp = get_related_value (x);
2157      if (subexp != 0)
2158	relt = lookup (subexp,
2159		       safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2160		       GET_MODE (subexp));
2161    }
2162
2163  if (relt == 0)
2164    return 0;
2165
2166  /* Search all related table entries for one that has an
2167     equivalent register.  */
2168
2169  p = relt;
2170  while (1)
2171    {
2172      /* This loop is strange in that it is executed in two different cases.
2173	 The first is when X is already in the table.  Then it is searching
2174	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2175	 X is not in the table.  Then RELT points to a class for the related
2176	 value.
2177
2178	 Ensure that, whatever case we are in, we ignore classes that have
2179	 the same value as X.  */
2180
2181      if (rtx_equal_p (x, p->exp))
2182	q = 0;
2183      else
2184	for (q = p->first_same_value; q; q = q->next_same_value)
2185	  if (GET_CODE (q->exp) == REG)
2186	    break;
2187
2188      if (q)
2189	break;
2190
2191      p = p->related_value;
2192
2193      /* We went all the way around, so there is nothing to be found.
2194	 Alternatively, perhaps RELT was in the table for some other reason
2195	 and it has no related values recorded.  */
2196      if (p == relt || p == 0)
2197	break;
2198    }
2199
2200  if (q == 0)
2201    return 0;
2202
2203  offset = (get_integer_term (x) - get_integer_term (p->exp));
2204  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2205  return plus_constant (q->exp, offset);
2206}
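
/* Worked example (illustrative rtx): if (reg:SI 101) is known equivalent
   to (const (plus (symbol_ref "tbl") (const_int 8))) and X is
   (const (plus (symbol_ref "tbl") (const_int 20))), the two constants
   share the related value (symbol_ref "tbl"), so this function returns
   (plus (reg:SI 101) (const_int 12)).  */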
2207
2208/* Hash a string.  Just add its bytes up.  */
2209static inline unsigned
2210canon_hash_string (ps)
2211     const char *ps;
2212{
2213  unsigned hash = 0;
2214  const unsigned char *p = (const unsigned char *) ps;
2215
2216  if (p)
2217    while (*p)
2218      hash += *p++;
2219
2220  return hash;
2221}
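
/* For example, canon_hash_string ("ab") is 'a' + 'b', i.e. 195 in ASCII;
   because the bytes are merely summed, permutations of a string collide,
   which is acceptable for a hash.  */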
2222
2223/* Hash an rtx.  We are careful to make sure the value is never negative.
2224   Equivalent registers hash identically.
2225   MODE is used in hashing for CONST_INTs only;
2226   otherwise the mode of X is used.
2227
2228   Store 1 in do_not_record if any subexpression is volatile.
2229
2230   Store 1 in hash_arg_in_memory if X contains a MEM rtx
2231   which does not have the RTX_UNCHANGING_P bit set.
2232
2233   Note that cse_insn knows that the hash code of a MEM expression
2234   is just (int) MEM plus the hash code of the address.  */
2235
2236static unsigned
2237canon_hash (x, mode)
2238     rtx x;
2239     enum machine_mode mode;
2240{
2241  int i, j;
2242  unsigned hash = 0;
2243  enum rtx_code code;
2244  const char *fmt;
2245
2246  /* repeat is used to turn tail-recursion into iteration.  */
2247 repeat:
2248  if (x == 0)
2249    return hash;
2250
2251  code = GET_CODE (x);
2252  switch (code)
2253    {
2254    case REG:
2255      {
2256	unsigned int regno = REGNO (x);
2257
2258	/* On some machines, we can't record any non-fixed hard register,
2259	   because extending its life will cause reload problems.  We
2260	   consider ap, fp, and sp to be fixed for this purpose.
2261
2262	   We also consider CCmode registers to be fixed for this purpose;
2263	   failure to do so leads to failure to simplify conditionals of the
2264	   0<100 type.
2265
2266	   On all machines, we can't record any global registers.
2267	   Nor should we record any register that is in a small
2268	   class, as defined by CLASS_LIKELY_SPILLED_P.  */
2269
2270	if (regno < FIRST_PSEUDO_REGISTER
2271	    && (global_regs[regno]
2272		|| CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno))
2273		|| (SMALL_REGISTER_CLASSES
2274		    && ! fixed_regs[regno]
2275		    && x != frame_pointer_rtx
2276		    && x != hard_frame_pointer_rtx
2277		    && x != arg_pointer_rtx
2278		    && x != stack_pointer_rtx
2279		    && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2280	  {
2281	    do_not_record = 1;
2282	    return 0;
2283	  }
2284
2285	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2286	return hash;
2287      }
2288
2289    /* We handle SUBREG of a REG specially because the underlying
2290       reg changes its hash value with every value change; we don't
2291       want to have to forget unrelated subregs when one subreg changes.  */
2292    case SUBREG:
2293      {
2294	if (GET_CODE (SUBREG_REG (x)) == REG)
2295	  {
2296	    hash += (((unsigned) SUBREG << 7)
2297		     + REGNO (SUBREG_REG (x))
2298		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2299	    return hash;
2300	  }
2301	break;
2302      }
2303
2304    case CONST_INT:
2305      {
2306	unsigned HOST_WIDE_INT tem = INTVAL (x);
2307	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2308	return hash;
2309      }
2310
2311    case CONST_DOUBLE:
2312      /* This is like the general case, except that it only counts
2313	 the integers representing the constant.  */
2314      hash += (unsigned) code + (unsigned) GET_MODE (x);
2315      if (GET_MODE (x) != VOIDmode)
2316	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2317	  {
2318	    unsigned HOST_WIDE_INT tem = XWINT (x, i);
2319	    hash += tem;
2320	  }
2321      else
2322	hash += ((unsigned) CONST_DOUBLE_LOW (x)
2323		 + (unsigned) CONST_DOUBLE_HIGH (x));
2324      return hash;
2325
2326      /* Assume there is only one rtx object for any given label.  */
2327    case LABEL_REF:
2328      hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2329      return hash;
2330
2331    case SYMBOL_REF:
2332      hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2333      return hash;
2334
2335    case MEM:
2336      /* We don't record if marked volatile or if BLKmode since we don't
2337	 know the size of the move.  */
2338      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2339	{
2340	  do_not_record = 1;
2341	  return 0;
2342	}
2343      if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2344	{
2345	  hash_arg_in_memory = 1;
2346	}
2347      /* Now that we have already found this special case,
2348	 might as well speed it up as much as possible.  */
2349      hash += (unsigned) MEM;
2350      x = XEXP (x, 0);
2351      goto repeat;
2352
2353    case USE:
2354      /* A USE that mentions non-volatile memory needs special
2355	 handling since the MEM may be BLKmode which normally
2356	 prevents an entry from being made.  Pure calls are
2357	 marked by a USE which mentions BLKmode memory.  */
2358      if (GET_CODE (XEXP (x, 0)) == MEM
2359	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2360	{
2361	  hash += (unsigned) USE;
2362	  x = XEXP (x, 0);
2363
2364	  if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2365	    hash_arg_in_memory = 1;
2366
2367	  /* Now that we have already found this special case,
2368	     might as well speed it up as much as possible.  */
2369	  hash += (unsigned) MEM;
2370	  x = XEXP (x, 0);
2371	  goto repeat;
2372	}
2373      break;
2374
2375    case PRE_DEC:
2376    case PRE_INC:
2377    case POST_DEC:
2378    case POST_INC:
2379    case PRE_MODIFY:
2380    case POST_MODIFY:
2381    case PC:
2382    case CC0:
2383    case CALL:
2384    case UNSPEC_VOLATILE:
2385      do_not_record = 1;
2386      return 0;
2387
2388    case ASM_OPERANDS:
2389      if (MEM_VOLATILE_P (x))
2390	{
2391	  do_not_record = 1;
2392	  return 0;
2393	}
2394      else
2395	{
2396	  /* We don't want to take the filename and line into account.  */
2397	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2398	    + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2399	    + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2400	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2401
2402	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2403	    {
2404	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2405		{
2406		  hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2407				       GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2408			   + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2409						(x, i)));
2410		}
2411
2412	      hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2413	      x = ASM_OPERANDS_INPUT (x, 0);
2414	      mode = GET_MODE (x);
2415	      goto repeat;
2416	    }
2417
2418	  return hash;
2419	}
2420      break;
2421
2422    default:
2423      break;
2424    }
2425
2426  i = GET_RTX_LENGTH (code) - 1;
2427  hash += (unsigned) code + (unsigned) GET_MODE (x);
2428  fmt = GET_RTX_FORMAT (code);
2429  for (; i >= 0; i--)
2430    {
2431      if (fmt[i] == 'e')
2432	{
2433	  rtx tem = XEXP (x, i);
2434
2435	  /* If we are about to do the last recursive call
2436	     needed at this level, change it into iteration.
2437	     This function is called enough to be worth it.  */
2438	  if (i == 0)
2439	    {
2440	      x = tem;
2441	      goto repeat;
2442	    }
2443	  hash += canon_hash (tem, 0);
2444	}
2445      else if (fmt[i] == 'E')
2446	for (j = 0; j < XVECLEN (x, i); j++)
2447	  hash += canon_hash (XVECEXP (x, i, j), 0);
2448      else if (fmt[i] == 's')
2449	hash += canon_hash_string (XSTR (x, i));
2450      else if (fmt[i] == 'i')
2451	{
2452	  unsigned tem = XINT (x, i);
2453	  hash += tem;
2454	}
2455      else if (fmt[i] == '0' || fmt[i] == 't')
2456	/* Unused.  */
2457	;
2458      else
2459	abort ();
2460    }
2461  return hash;
2462}
2463
2464/* Like canon_hash but with no side effects.  */
2465
2466static unsigned
2467safe_hash (x, mode)
2468     rtx x;
2469     enum machine_mode mode;
2470{
2471  int save_do_not_record = do_not_record;
2472  int save_hash_arg_in_memory = hash_arg_in_memory;
2473  unsigned hash = canon_hash (x, mode);
2474  hash_arg_in_memory = save_hash_arg_in_memory;
2475  do_not_record = save_do_not_record;
2476  return hash;
2477}
2478
2479/* Return 1 iff X and Y would canonicalize into the same thing,
2480   without actually constructing the canonicalization of either one.
2481   If VALIDATE is nonzero,
2482   we assume X is an expression being processed from the rtl
2483   and Y was found in the hash table.  We check register refs
2484   in Y for being marked as valid.
2485
2486   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2487   that is known to be in the register.  Ordinarily, we don't allow them
2488   to match, because letting them match would cause unpredictable results
2489   in all the places that search a hash table chain for an equivalent
2490   for a given value.  A possible equivalent that has different structure
2491   has its hash code computed from different data.  Whether the hash code
2492   is the same as that of the given value is pure luck.  */
2493
2494static int
2495exp_equiv_p (x, y, validate, equal_values)
2496     rtx x, y;
2497     int validate;
2498     int equal_values;
2499{
2500  int i, j;
2501  enum rtx_code code;
2502  const char *fmt;
2503
2504  /* Note: it is incorrect to assume an expression is equivalent to itself
2505     if VALIDATE is nonzero.  */
2506  if (x == y && !validate)
2507    return 1;
2508  if (x == 0 || y == 0)
2509    return x == y;
2510
2511  code = GET_CODE (x);
2512  if (code != GET_CODE (y))
2513    {
2514      if (!equal_values)
2515	return 0;
2516
2517      /* If X is a constant and Y is a register or vice versa, they may be
2518	 equivalent.  We only have to validate if Y is a register.  */
2519      if (CONSTANT_P (x) && GET_CODE (y) == REG
2520	  && REGNO_QTY_VALID_P (REGNO (y)))
2521	{
2522	  int y_q = REG_QTY (REGNO (y));
2523	  struct qty_table_elem *y_ent = &qty_table[y_q];
2524
2525	  if (GET_MODE (y) == y_ent->mode
2526	      && rtx_equal_p (x, y_ent->const_rtx)
2527	      && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2528	    return 1;
2529	}
2530
2531      if (CONSTANT_P (y) && code == REG
2532	  && REGNO_QTY_VALID_P (REGNO (x)))
2533	{
2534	  int x_q = REG_QTY (REGNO (x));
2535	  struct qty_table_elem *x_ent = &qty_table[x_q];
2536
2537	  if (GET_MODE (x) == x_ent->mode
2538	      && rtx_equal_p (y, x_ent->const_rtx))
2539	    return 1;
2540	}
2541
2542      return 0;
2543    }
2544
2545  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2546  if (GET_MODE (x) != GET_MODE (y))
2547    return 0;
2548
2549  switch (code)
2550    {
2551    case PC:
2552    case CC0:
2553    case CONST_INT:
2554      return x == y;
2555
2556    case LABEL_REF:
2557      return XEXP (x, 0) == XEXP (y, 0);
2558
2559    case SYMBOL_REF:
2560      return XSTR (x, 0) == XSTR (y, 0);
2561
2562    case REG:
2563      {
2564	unsigned int regno = REGNO (y);
2565	unsigned int endregno
2566	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2567		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2568	unsigned int i;
2569
2570	/* If the quantities are not the same, the expressions are not
2571	   equivalent.  If they are and we are not to validate, they
2572	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2573
2574	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2575	  return 0;
2576
2577	if (! validate)
2578	  return 1;
2579
2580	for (i = regno; i < endregno; i++)
2581	  if (REG_IN_TABLE (i) != REG_TICK (i))
2582	    return 0;
2583
2584	return 1;
2585      }
2586
2587    /*  For commutative operations, check both orders.  */
2588    case PLUS:
2589    case MULT:
2590    case AND:
2591    case IOR:
2592    case XOR:
2593    case NE:
2594    case EQ:
2595      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2596	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2597			       validate, equal_values))
2598	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2599			       validate, equal_values)
2600		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2601				  validate, equal_values)));
2602
2603    case ASM_OPERANDS:
2604      /* We don't use the generic code below because we want to
2605	 disregard filename and line numbers.  */
2606
2607      /* A volatile asm isn't equivalent to any other.  */
2608      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2609	return 0;
2610
2611      if (GET_MODE (x) != GET_MODE (y)
2612	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2613	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2614		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2615	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2616	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2617	return 0;
2618
2619      if (ASM_OPERANDS_INPUT_LENGTH (x))
2620	{
2621	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2622	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2623			       ASM_OPERANDS_INPUT (y, i),
2624			       validate, equal_values)
2625		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2626			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2627	      return 0;
2628	}
2629
2630      return 1;
2631
2632    default:
2633      break;
2634    }
2635
2636  /* Compare the elements.  If any pair of corresponding elements
2637     fail to match, return 0 for the whole thing.  */
2638
2639  fmt = GET_RTX_FORMAT (code);
2640  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2641    {
2642      switch (fmt[i])
2643	{
2644	case 'e':
2645	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2646	    return 0;
2647	  break;
2648
2649	case 'E':
2650	  if (XVECLEN (x, i) != XVECLEN (y, i))
2651	    return 0;
2652	  for (j = 0; j < XVECLEN (x, i); j++)
2653	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2654			       validate, equal_values))
2655	      return 0;
2656	  break;
2657
2658	case 's':
2659	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2660	    return 0;
2661	  break;
2662
2663	case 'i':
2664	  if (XINT (x, i) != XINT (y, i))
2665	    return 0;
2666	  break;
2667
2668	case 'w':
2669	  if (XWINT (x, i) != XWINT (y, i))
2670	    return 0;
2671	  break;
2672
2673	case '0':
2674	case 't':
2675	  break;
2676
2677	default:
2678	  abort ();
2679	}
2680    }
2681
2682  return 1;
2683}
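
/* For example, with EQUAL_VALUES nonzero, (reg:SI 100) is treated as
   equivalent to (const_int 4) when reg 100's quantity records
   (const_int 4) as its constant value in SImode; with EQUAL_VALUES zero,
   the differing codes make them unequal immediately.  */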
2684
2685/* Return 1 if X has a value that can vary even between two
2686   executions of the program.  0 means X can be compared reliably
2687   against certain constants or near-constants.  */
2688
2689static int
2690cse_rtx_varies_p (x, from_alias)
2691     rtx x;
2692     int from_alias;
2693{
2694  /* We need not check for X and the equivalence class being of the same
2695     mode because if X is equivalent to a constant in some mode, it
2696     doesn't vary in any mode.  */
2697
2698  if (GET_CODE (x) == REG
2699      && REGNO_QTY_VALID_P (REGNO (x)))
2700    {
2701      int x_q = REG_QTY (REGNO (x));
2702      struct qty_table_elem *x_ent = &qty_table[x_q];
2703
2704      if (GET_MODE (x) == x_ent->mode
2705	  && x_ent->const_rtx != NULL_RTX)
2706	return 0;
2707    }
2708
2709  if (GET_CODE (x) == PLUS
2710      && GET_CODE (XEXP (x, 1)) == CONST_INT
2711      && GET_CODE (XEXP (x, 0)) == REG
2712      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2713    {
2714      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2715      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2716
2717      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2718	  && x0_ent->const_rtx != NULL_RTX)
2719	return 0;
2720    }
2721
2722  /* This can happen as the result of virtual register instantiation, if
2723     the initial constant is too large to be a valid address.  This gives
2724     us a three instruction sequence, load large offset into a register,
2725     load fp minus a constant into a register, then a MEM which is the
2726     sum of the two `constant' registers.  */
2727  if (GET_CODE (x) == PLUS
2728      && GET_CODE (XEXP (x, 0)) == REG
2729      && GET_CODE (XEXP (x, 1)) == REG
2730      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2731      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2732    {
2733      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2734      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2735      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2736      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2737
2738      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2739	  && x0_ent->const_rtx != NULL_RTX
2740	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2741	  && x1_ent->const_rtx != NULL_RTX)
2742	return 0;
2743    }
2744
2745  return rtx_varies_p (x, from_alias);
2746}
2747
2748/* Canonicalize an expression:
2749   replace each register reference inside it
2750   with the "oldest" equivalent register.
2751
2752   If INSN is non-zero and we are replacing a pseudo with a hard register
2753   or vice versa, validate_change is used to ensure that INSN remains valid
2754   after we make our substitution.  The calls are made with IN_GROUP non-zero
2755   so apply_change_group must be called upon the outermost return from this
2756   function (unless INSN is zero).  The result of apply_change_group can
2757   generally be discarded since the changes we are making are optional.  */
2758
2759static rtx
2760canon_reg (x, insn)
2761     rtx x;
2762     rtx insn;
2763{
2764  int i;
2765  enum rtx_code code;
2766  const char *fmt;
2767
2768  if (x == 0)
2769    return x;
2770
2771  code = GET_CODE (x);
2772  switch (code)
2773    {
2774    case PC:
2775    case CC0:
2776    case CONST:
2777    case CONST_INT:
2778    case CONST_DOUBLE:
2779    case SYMBOL_REF:
2780    case LABEL_REF:
2781    case ADDR_VEC:
2782    case ADDR_DIFF_VEC:
2783      return x;
2784
2785    case REG:
2786      {
2787	int first;
2788	int q;
2789	struct qty_table_elem *ent;
2790
2791	/* Never replace a hard reg, because hard regs can appear
2792	   in more than one machine mode, and we must preserve the mode
2793	   of each occurrence.  Also, some hard regs appear in
2794	   MEMs that are shared and mustn't be altered.  Don't try to
2795	   replace any reg that maps to a reg of class NO_REGS.  */
2796	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2797	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2798	  return x;
2799
2800	q = REG_QTY (REGNO (x));
2801	ent = &qty_table[q];
2802	first = ent->first_reg;
2803	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2804		: REGNO_REG_CLASS (first) == NO_REGS ? x
2805		: gen_rtx_REG (ent->mode, first));
2806      }
2807
2808    default:
2809      break;
2810    }
2811
2812  fmt = GET_RTX_FORMAT (code);
2813  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2814    {
2815      int j;
2816
2817      if (fmt[i] == 'e')
2818	{
2819	  rtx new = canon_reg (XEXP (x, i), insn);
2820	  int insn_code;
2821
2822	  /* If replacing pseudo with hard reg or vice versa, ensure the
2823	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2824	  if (insn != 0 && new != 0
2825	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2826	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2827		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2828		  || (insn_code = recog_memoized (insn)) < 0
2829		  || insn_data[insn_code].n_dups > 0))
2830	    validate_change (insn, &XEXP (x, i), new, 1);
2831	  else
2832	    XEXP (x, i) = new;
2833	}
2834      else if (fmt[i] == 'E')
2835	for (j = 0; j < XVECLEN (x, i); j++)
2836	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2837    }
2838
2839  return x;
2840}
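
/* Usage sketch (illustrative; INSN is an assumed insn): because canon_reg
   queues replacements with IN_GROUP nonzero, the outermost caller commits
   or discards them as a group; the result may be ignored, since the
   changes are optional.  */
#if 0
{
  canon_reg (PATTERN (insn), insn);
  apply_change_group ();
}
#endif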
2841
2842/* LOC is a location within INSN that is an operand address (the contents of
2843   a MEM).  Find the best equivalent address to use that is valid for this
2844   insn.
2845
2846   On most CISC machines, complicated address modes are costly, and rtx_cost
2847   is a good approximation for that cost.  However, most RISC machines have
2848   only a few (usually only one) memory reference formats.  If an address is
2849   valid at all, it is often just as cheap as any other address.  Hence, for
2850   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2851   costs of various addresses.  For two addresses of equal cost, choose the one
2852   with the highest `rtx_cost' value as that has the potential of eliminating
2853   the most insns.  For equal costs, we choose the first in the equivalence
2854   class.  Note that we ignore the fact that pseudo registers are cheaper
2855   than hard registers here because we would also prefer the pseudo registers.
2856  */
2857
2858static void
2859find_best_addr (insn, loc, mode)
2860     rtx insn;
2861     rtx *loc;
2862     enum machine_mode mode;
2863{
2864  struct table_elt *elt;
2865  rtx addr = *loc;
2866#ifdef ADDRESS_COST
2867  struct table_elt *p;
2868  int found_better = 1;
2869#endif
2870  int save_do_not_record = do_not_record;
2871  int save_hash_arg_in_memory = hash_arg_in_memory;
2872  int addr_volatile;
2873  int regno;
2874  unsigned hash;
2875
2876  /* Do not try to replace constant addresses or addresses of local and
2877     argument slots.  These MEM expressions are made only once and inserted
2878     in many instructions, as well as being used to control symbol table
2879     output.  It is not safe to clobber them.
2880
2881     There are some uncommon cases where the address is already in a register
2882     for some reason, but we cannot take advantage of that because we have
2883     no easy way to unshare the MEM.  In addition, looking up all stack
2884     addresses is costly.  */
2885  if ((GET_CODE (addr) == PLUS
2886       && GET_CODE (XEXP (addr, 0)) == REG
2887       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2888       && (regno = REGNO (XEXP (addr, 0)),
2889	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2890	   || regno == ARG_POINTER_REGNUM))
2891      || (GET_CODE (addr) == REG
2892	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2893	      || regno == HARD_FRAME_POINTER_REGNUM
2894	      || regno == ARG_POINTER_REGNUM))
2895      || GET_CODE (addr) == ADDRESSOF
2896      || CONSTANT_ADDRESS_P (addr))
2897    return;
2898
2899  /* If this address is not simply a register, try to fold it.  This will
2900     sometimes simplify the expression.  Many simplifications
2901     will not be valid, but some, usually applying the associative rule, will
2902     be valid and produce better code.  */
2903  if (GET_CODE (addr) != REG)
2904    {
2905      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2906      int addr_folded_cost = address_cost (folded, mode);
2907      int addr_cost = address_cost (addr, mode);
2908
2909      if ((addr_folded_cost < addr_cost
2910	   || (addr_folded_cost == addr_cost
2911	       /* ??? The rtx_cost comparison is left over from an older
2912		  version of this code.  It is probably no longer helpful.  */
2913	       && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2914		   || approx_reg_cost (folded) < approx_reg_cost (addr))))
2915	  && validate_change (insn, loc, folded, 0))
2916	addr = folded;
2917    }
2918
2919  /* If this address is not in the hash table, we can't look for equivalences
2920     of the whole address.  Also, ignore if volatile.  */
2921
2922  do_not_record = 0;
2923  hash = HASH (addr, Pmode);
2924  addr_volatile = do_not_record;
2925  do_not_record = save_do_not_record;
2926  hash_arg_in_memory = save_hash_arg_in_memory;
2927
2928  if (addr_volatile)
2929    return;
2930
2931  elt = lookup (addr, hash, Pmode);
2932
2933#ifndef ADDRESS_COST
2934  if (elt)
2935    {
2936      int our_cost = elt->cost;
2937
2938      /* Find the lowest cost below ours that works.  */
2939      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2940	if (elt->cost < our_cost
2941	    && (GET_CODE (elt->exp) == REG
2942		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
2943	    && validate_change (insn, loc,
2944				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2945	  return;
2946    }
2947#else
2948
2949  if (elt)
2950    {
2951      /* We need to find the best (under the criteria documented above) entry
2952	 in the class that is valid.  We use the `flag' field to indicate
2953	 choices that were invalid and iterate until we can't find a better
2954	 one that hasn't already been tried.  */
2955
2956      for (p = elt->first_same_value; p; p = p->next_same_value)
2957	p->flag = 0;
2958
2959      while (found_better)
2960	{
2961	  int best_addr_cost = address_cost (*loc, mode);
2962	  int best_rtx_cost = (elt->cost + 1) >> 1;
2963	  int exp_cost;
2964	  struct table_elt *best_elt = elt;
2965
2966	  found_better = 0;
2967	  for (p = elt->first_same_value; p; p = p->next_same_value)
2968	    if (! p->flag)
2969	      {
2970		if ((GET_CODE (p->exp) == REG
2971		     || exp_equiv_p (p->exp, p->exp, 1, 0))
2972		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2973			|| (exp_cost == best_addr_cost
2974			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
2975		  {
2976		    found_better = 1;
2977		    best_addr_cost = exp_cost;
2978		    best_rtx_cost = (p->cost + 1) >> 1;
2979		    best_elt = p;
2980		  }
2981	      }
2982
2983	  if (found_better)
2984	    {
2985	      if (validate_change (insn, loc,
2986				   canon_reg (copy_rtx (best_elt->exp),
2987					      NULL_RTX), 0))
2988		return;
2989	      else
2990		best_elt->flag = 1;
2991	    }
2992	}
2993    }
2994
2995  /* If the address is a binary operation with the first operand a register
2996     and the second a constant, do the same as above, but looking for
2997     equivalences of the register.  Then try to simplify before checking for
2998     the best address to use.  This catches a few cases:  First is when we
2999     have REG+const and the register is another REG+const.  We can often merge
3000     the constants and eliminate one insn and one register.  It may also be
3001     that a machine has a cheap REG+REG+const.  Finally, this improves the
3002     code on the Alpha for unaligned byte stores.  */
3003
3004  if (flag_expensive_optimizations
3005      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3006	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3007      && GET_CODE (XEXP (*loc, 0)) == REG
3008      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3009    {
3010      rtx c = XEXP (*loc, 1);
3011
3012      do_not_record = 0;
3013      hash = HASH (XEXP (*loc, 0), Pmode);
3014      do_not_record = save_do_not_record;
3015      hash_arg_in_memory = save_hash_arg_in_memory;
3016
3017      elt = lookup (XEXP (*loc, 0), hash, Pmode);
3018      if (elt == 0)
3019	return;
3020
3021      /* We need to find the best (under the criteria documented above) entry
3022	 in the class that is valid.  We use the `flag' field to indicate
3023	 choices that were invalid and iterate until we can't find a better
3024	 one that hasn't already been tried.  */
3025
3026      for (p = elt->first_same_value; p; p = p->next_same_value)
3027	p->flag = 0;
3028
3029      while (found_better)
3030	{
3031	  int best_addr_cost = address_cost (*loc, mode);
3032	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
3033	  struct table_elt *best_elt = elt;
3034	  rtx best_rtx = *loc;
3035	  int count;
3036
3037	  /* This is at worst an O(n^2) algorithm, so limit our search
3038	     to the first 32 elements on the list.  This avoids trouble
3039	     compiling code with very long basic blocks that can easily
3040	     call simplify_gen_binary so many times that we run out of
3041	     memory.  */
3042
3043	  found_better = 0;
3044	  for (p = elt->first_same_value, count = 0;
3045	       p && count < 32;
3046	       p = p->next_same_value, count++)
3047	    if (! p->flag
3048		&& (GET_CODE (p->exp) == REG
3049		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3050	      {
3051		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3052					       p->exp, c);
3053		int new_cost;
3054		new_cost = address_cost (new, mode);
3055
3056		if (new_cost < best_addr_cost
3057		    || (new_cost == best_addr_cost
3058			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3059		  {
3060		    found_better = 1;
3061		    best_addr_cost = new_cost;
3062		    best_rtx_cost = (COST (new) + 1) >> 1;
3063		    best_elt = p;
3064		    best_rtx = new;
3065		  }
3066	      }
3067
3068	  if (found_better)
3069	    {
3070	      if (validate_change (insn, loc,
3071				   canon_reg (copy_rtx (best_rtx),
3072					      NULL_RTX), 0))
3073		return;
3074	      else
3075		best_elt->flag = 1;
3076	    }
3077	}
3078    }
3079#endif
3080}
3081
3082/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
3083   operation (EQ, NE, GT, etc.), follow it back through the hash table to
3084   find what values are actually being compared.
3085
3086   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3087   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3088   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3089   compared to produce cc0.
3090
3091   The return value is the comparison operator and is either CODE itself
3092   or the code corresponding to the inverse of the comparison.  */
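
/* A sketch of the intent (hypothetical rtl): if CODE is NE, *PARG1 is a
   register known to be equivalent to (compare (reg 66) (reg 67)), and
   *PARG2 is (const_int 0), then on return *PARG1 holds (reg 66), *PARG2
   holds (reg 67), and the returned code is still NE.  */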
3093
3094static enum rtx_code
3095find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3096     enum rtx_code code;
3097     rtx *parg1, *parg2;
3098     enum machine_mode *pmode1, *pmode2;
3099{
3100  rtx arg1, arg2;
3101
3102  arg1 = *parg1, arg2 = *parg2;
3103
3104  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3105
3106  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3107    {
3108      /* Set non-zero when we find something of interest.  */
3109      rtx x = 0;
3110      int reverse_code = 0;
3111      struct table_elt *p = 0;
3112
3113      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3114	 On machines with CC0, this is the only case that can occur, since
3115	 fold_rtx will return the COMPARE or item being compared with zero
3116	 when given CC0.  */
3117
3118      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3119	x = arg1;
3120
3121      /* If ARG1 is a comparison operator and CODE is testing for
3122	 STORE_FLAG_VALUE, get the inner arguments.  */
3123
3124      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3125	{
3126	  if (code == NE
3127	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3128		  && code == LT && STORE_FLAG_VALUE == -1)
3129#ifdef FLOAT_STORE_FLAG_VALUE
3130	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3131		  && (REAL_VALUE_NEGATIVE
3132		      (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3133#endif
3134	      )
3135	    x = arg1;
3136	  else if (code == EQ
3137		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3138		       && code == GE && STORE_FLAG_VALUE == -1)
3139#ifdef FLOAT_STORE_FLAG_VALUE
3140		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3141		       && (REAL_VALUE_NEGATIVE
3142			   (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3143#endif
3144		   )
3145	    x = arg1, reverse_code = 1;
3146	}
3147
3148      /* ??? We could also check for
3149
3150	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3151
3152	 and related forms, but let's wait until we see them occurring.  */
3153
3154      if (x == 0)
3155	/* Look up ARG1 in the hash table and see if it has an equivalence
3156	   that lets us see what is being compared.  */
3157	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3158		    GET_MODE (arg1));
3159      if (p)
3160	{
3161	  p = p->first_same_value;
3162
3163	  /* If what we compare is already known to be constant, that is as
3164	     good as it gets.
3165	     We need to break the loop in this case, because otherwise we
3166	     can have an infinite loop: a reg known to be a constant that is
3167	     the same as a comparison of a second reg against zero appearing
3168	     later in the insn stream, which in turn is constant and the same
3169	     as the comparison of the first reg against zero, and so on
3170	     indefinitely...  */
3171	  if (p->is_const)
3172	    break;
3173	}
3174
3175      for (; p; p = p->next_same_value)
3176	{
3177	  enum machine_mode inner_mode = GET_MODE (p->exp);
3178
3179	  /* If the entry isn't valid, skip it.  */
3180	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3181	    continue;
3182
3183	  if (GET_CODE (p->exp) == COMPARE
3184	      /* Another possibility is that this machine has a compare insn
3185		 that includes the comparison code.  In that case, ARG1 would
3186		 be equivalent to a comparison operation that would set ARG1 to
3187		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3188		 CODE is the actual comparison being done; if it is an EQ,
3189		 we must reverse CODE.  On machines with a negative value
3190		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3191	      || ((code == NE
3192		   || (code == LT
3193		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3194		       && (GET_MODE_BITSIZE (inner_mode)
3195			   <= HOST_BITS_PER_WIDE_INT)
3196		       && (STORE_FLAG_VALUE
3197			   & ((HOST_WIDE_INT) 1
3198			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3199#ifdef FLOAT_STORE_FLAG_VALUE
3200		   || (code == LT
3201		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3202		       && (REAL_VALUE_NEGATIVE
3203			   (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3204#endif
3205		   )
3206		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3207	    {
3208	      x = p->exp;
3209	      break;
3210	    }
3211	  else if ((code == EQ
3212		    || (code == GE
3213			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3214			&& (GET_MODE_BITSIZE (inner_mode)
3215			    <= HOST_BITS_PER_WIDE_INT)
3216			&& (STORE_FLAG_VALUE
3217			    & ((HOST_WIDE_INT) 1
3218			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3219#ifdef FLOAT_STORE_FLAG_VALUE
3220		    || (code == GE
3221			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3222		        && (REAL_VALUE_NEGATIVE
3223			    (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3224#endif
3225		    )
3226		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3227	    {
3228	      reverse_code = 1;
3229	      x = p->exp;
3230	      break;
3231	    }
3232
3233	  /* If this is fp + constant, the equivalent is a better operand since
3234	     it may let us predict the value of the comparison.  */
3235	  else if (NONZERO_BASE_PLUS_P (p->exp))
3236	    {
3237	      arg1 = p->exp;
3238	      continue;
3239	    }
3240	}
3241
3242      /* If we didn't find a useful equivalence for ARG1, we are done.
3243	 Otherwise, set up for the next iteration.  */
3244      if (x == 0)
3245	break;
3246
3247      /* If we need to reverse the comparison, make sure the reversal is
3248	 possible -- we can't necessarily infer the value of GE from LT
3249	 with floating-point operands.  */
3250      if (reverse_code)
3251	{
3252	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3253	  if (reversed == UNKNOWN)
3254	    break;
3255	  else code = reversed;
3256	}
3257      else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3258	code = GET_CODE (x);
3259      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3260    }
3261
3262  /* Return our results.  Return the modes from before fold_rtx
3263     because fold_rtx might produce a CONST_INT, whose VOIDmode would lose the information.  */
3264  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3265  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3266
3267  return code;
3268}
3269
3270/* If X is a nontrivial arithmetic operation on an argument
3271   for which a constant value can be determined, return
3272   the result of operating on that value, as a constant.
3273   Otherwise, return X, possibly with one or more operands
3274   modified by recursive calls to this function.
3275
3276   If X is a register whose contents are known, we do NOT
3277   return those contents here.  equiv_constant is called to
3278   perform that task.
3279
3280   INSN is the insn that we may be modifying.  If it is 0, make a copy
3281   of X before modifying it.  */
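
/* For example (an illustrative case, not an exhaustive one): given
   (plus:SI (reg 65) (const_int 2)) where (reg 65) has the known constant
   equivalent (const_int 3), both operands fold to constants and the
   simplify_binary_operation call below returns (const_int 5).  */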
3282
3283static rtx
3284fold_rtx (x, insn)
3285     rtx x;
3286     rtx insn;
3287{
3288  enum rtx_code code;
3289  enum machine_mode mode;
3290  const char *fmt;
3291  int i;
3292  rtx new = 0;
3293  int copied = 0;
3294  int must_swap = 0;
3295
3296  /* Folded equivalents of first two operands of X.  */
3297  rtx folded_arg0;
3298  rtx folded_arg1;
3299
3300  /* Constant equivalents of first three operands of X;
3301     0 when no such equivalent is known.  */
3302  rtx const_arg0;
3303  rtx const_arg1;
3304  rtx const_arg2;
3305
3306  /* The mode of the first operand of X.  We need this for sign and zero
3307     extends.  */
3308  enum machine_mode mode_arg0;
3309
3310  if (x == 0)
3311    return x;
3312
3313  mode = GET_MODE (x);
3314  code = GET_CODE (x);
3315  switch (code)
3316    {
3317    case CONST:
3318    case CONST_INT:
3319    case CONST_DOUBLE:
3320    case SYMBOL_REF:
3321    case LABEL_REF:
3322    case REG:
3323      /* No use simplifying an EXPR_LIST,
3324	 since it is used only for lists of args
3325	 in a function call's REG_EQUAL note.  */
3326    case EXPR_LIST:
3327      /* Changing anything inside an ADDRESSOF is incorrect; we don't
3328	 want to (e.g.,) make (addressof (const_int 0)) just because
3329	 the location is known to be zero.  */
3330    case ADDRESSOF:
3331      return x;
3332
3333#ifdef HAVE_cc0
3334    case CC0:
3335      return prev_insn_cc0;
3336#endif
3337
3338    case PC:
3339      /* If the next insn is a CODE_LABEL followed by a jump table,
3340	 PC's value is a LABEL_REF pointing to that label.  That
3341	 lets us fold switch statements on the VAX.  */
3342      if (insn && GET_CODE (insn) == JUMP_INSN)
3343	{
3344	  rtx next = next_nonnote_insn (insn);
3345
3346	  if (next && GET_CODE (next) == CODE_LABEL
3347	      && NEXT_INSN (next) != 0
3348	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3349	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3350		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3351	    return gen_rtx_LABEL_REF (Pmode, next);
3352	}
3353      break;
3354
3355    case SUBREG:
3356      /* See if we previously assigned a constant value to this SUBREG.  */
3357      if ((new = lookup_as_function (x, CONST_INT)) != 0
3358	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3359	return new;
3360
3361      /* If this is a paradoxical SUBREG, we have no idea what value the
3362	 extra bits would have.  However, if the operand is equivalent
3363	 to a SUBREG whose operand is in the same mode as ours, and all the
3364	 modes are within a word, we can just use the inner operand
3365	 because these SUBREGs just say how to treat the register.
3366
3367	 Similarly if we find an integer constant.  */
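
      /* E.g. (a hypothetical case): for (subreg:SI (reg:HI 65) 0), if
	 (reg:HI 65) is known equivalent to (subreg:HI (reg:SI 66) 0), we
	 can simply use (reg:SI 66); and if it is known equivalent to
	 (const_int 7), we can use (const_int 7) directly.  */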
3368
3369      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3370	{
3371	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3372	  struct table_elt *elt;
3373
3374	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3375	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3376	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3377				imode)) != 0)
3378	    for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3379	      {
3380		if (CONSTANT_P (elt->exp)
3381		    && GET_MODE (elt->exp) == VOIDmode)
3382		  return elt->exp;
3383
3384		if (GET_CODE (elt->exp) == SUBREG
3385		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
3386		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3387		  return copy_rtx (SUBREG_REG (elt->exp));
3388	      }
3389
3390	  return x;
3391	}
3392
3393      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
3394	 We might be able to if the SUBREG is extracting a single word in an
3395	 integral mode or extracting the low part.  */
3396
3397      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3398      const_arg0 = equiv_constant (folded_arg0);
3399      if (const_arg0)
3400	folded_arg0 = const_arg0;
3401
3402      if (folded_arg0 != SUBREG_REG (x))
3403	{
3404	  new = simplify_subreg (mode, folded_arg0,
3405				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3406	  if (new)
3407	    return new;
3408	}
3409
3410      /* If this is a narrowing SUBREG and our operand is a REG, see if
3411	 we can find an equivalence for REG that is an arithmetic operation
3412	 in a wider mode where both operands are paradoxical SUBREGs
3413	 from objects of our result mode.  In that case, we couldn't report
3414	 an equivalent value for that operation, since we don't know what the
3415	 extra bits will be.  But we can find an equivalence for this SUBREG
3416	 by folding that operation in the narrow mode.  This allows us to
3417	 fold arithmetic in narrow modes when the machine only supports
3418	 word-sized arithmetic.
3419
3420	 Also look for a case where we have a SUBREG whose operand is the
3421	 same as our result.  If both modes are smaller than a word, we
3422	 are simply interpreting a register in different modes and we
3423	 can use the inner value.  */
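
      /* A sketch (hypothetical registers): when folding
	 (subreg:HI (reg:SI 65) 0), if (reg:SI 65) is known equivalent to
	 (plus:SI (subreg:SI (reg:HI 66) 0) (const_int 2)) and (reg:HI 66)
	 has a known constant value, the PLUS can be evaluated in HImode
	 on the constants, giving a constant equivalent for the SUBREG.  */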
3424
3425      if (GET_CODE (folded_arg0) == REG
3426	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3427	  && subreg_lowpart_p (x))
3428	{
3429	  struct table_elt *elt;
3430
3431	  /* We can use HASH here since we know that canon_hash won't be
3432	     called.  */
3433	  elt = lookup (folded_arg0,
3434			HASH (folded_arg0, GET_MODE (folded_arg0)),
3435			GET_MODE (folded_arg0));
3436
3437	  if (elt)
3438	    elt = elt->first_same_value;
3439
3440	  for (; elt; elt = elt->next_same_value)
3441	    {
3442	      enum rtx_code eltcode = GET_CODE (elt->exp);
3443
3444	      /* Just check for unary and binary operations.  */
3445	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3446		  && GET_CODE (elt->exp) != SIGN_EXTEND
3447		  && GET_CODE (elt->exp) != ZERO_EXTEND
3448		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3449		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3450		{
3451		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3452
3453		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3454		    op0 = fold_rtx (op0, NULL_RTX);
3455
3456		  op0 = equiv_constant (op0);
3457		  if (op0)
3458		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3459						    op0, mode);
3460		}
3461	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3462			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3463		       && eltcode != DIV && eltcode != MOD
3464		       && eltcode != UDIV && eltcode != UMOD
3465		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3466		       && eltcode != ROTATE && eltcode != ROTATERT
3467		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3468			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3469				== mode))
3470			   || CONSTANT_P (XEXP (elt->exp, 0)))
3471		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3472			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3473				== mode))
3474			   || CONSTANT_P (XEXP (elt->exp, 1))))
3475		{
3476		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3477		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3478
3479		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3480		    op0 = fold_rtx (op0, NULL_RTX);
3481
3482		  if (op0)
3483		    op0 = equiv_constant (op0);
3484
3485		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3486		    op1 = fold_rtx (op1, NULL_RTX);
3487
3488		  if (op1)
3489		    op1 = equiv_constant (op1);
3490
3491		  /* If we are looking for the low SImode part of
3492		     (ashift:DI c (const_int 32)), it doesn't work
3493		     to compute that in SImode, because a 32-bit shift
3494		     in SImode is unpredictable.  We know the value is 0.  */
3495		  if (op0 && op1
3496		      && GET_CODE (elt->exp) == ASHIFT
3497		      && GET_CODE (op1) == CONST_INT
3498		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3499		    {
3500		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3501
3502			/* If the count fits in the inner mode's width,
3503			   but exceeds the outer mode's width,
3504			   the value will get truncated to 0
3505			   by the subreg.  */
3506			new = const0_rtx;
3507		      else
3508			/* If the count exceeds even the inner mode's width,
3509			   don't fold this expression.  */
3510			new = 0;
3511		    }
3512		  else if (op0 && op1)
3513		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3514						     op0, op1);
3515		}
3516
3517	      else if (GET_CODE (elt->exp) == SUBREG
3518		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
3519		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3520			   <= UNITS_PER_WORD)
3521		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3522		new = copy_rtx (SUBREG_REG (elt->exp));
3523
3524	      if (new)
3525		return new;
3526	    }
3527	}
3528
3529      return x;
3530
3531    case NOT:
3532    case NEG:
3533      /* If we have (NOT Y), see if Y is known to be (NOT Z).
3534	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3535      new = lookup_as_function (XEXP (x, 0), code);
3536      if (new)
3537	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3538      break;
3539
3540    case MEM:
3541      /* If we are not actually processing an insn, don't try to find the
3542	 best address.  Not only don't we care, but we could modify the
3543	 MEM in an invalid way since we have no insn to validate against.  */
3544      if (insn != 0)
3545	find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3546
3547      {
3548	/* Even if we don't fold in the insn itself,
3549	   we can safely do so here, in hopes of getting a constant.  */
3550	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3551	rtx base = 0;
3552	HOST_WIDE_INT offset = 0;
3553
3554	if (GET_CODE (addr) == REG
3555	    && REGNO_QTY_VALID_P (REGNO (addr)))
3556	  {
3557	    int addr_q = REG_QTY (REGNO (addr));
3558	    struct qty_table_elem *addr_ent = &qty_table[addr_q];
3559
3560	    if (GET_MODE (addr) == addr_ent->mode
3561		&& addr_ent->const_rtx != NULL_RTX)
3562	      addr = addr_ent->const_rtx;
3563	  }
3564
3565	/* If the address is constant, split it into a base and integer offset.  */
3566	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3567	  base = addr;
3568	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3569		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3570	  {
3571	    base = XEXP (XEXP (addr, 0), 0);
3572	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3573	  }
3574	else if (GET_CODE (addr) == LO_SUM
3575		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3576	  base = XEXP (addr, 1);
3577	else if (GET_CODE (addr) == ADDRESSOF)
3578	  return change_address (x, VOIDmode, addr);
3579
3580	/* If this is a constant pool reference, we can fold it into its
3581	   constant to allow better value tracking.  */
3582	if (base && GET_CODE (base) == SYMBOL_REF
3583	    && CONSTANT_POOL_ADDRESS_P (base))
3584	  {
3585	    rtx constant = get_pool_constant (base);
3586	    enum machine_mode const_mode = get_pool_mode (base);
3587	    rtx new;
3588
3589	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3590	      constant_pool_entries_cost = COST (constant);
3591
3592	    /* If we are loading the full constant, we have an equivalence.  */
3593	    if (offset == 0 && mode == const_mode)
3594	      return constant;
3595
3596	    /* If this actually isn't a constant (weird!), we can't do
3597	       anything.  Otherwise, handle the two most common cases:
3598	       extracting a word from a multi-word constant, and extracting
3599	       the low-order bits.  Other cases don't seem common enough to
3600	       worry about.  */
3601	    if (! CONSTANT_P (constant))
3602	      return x;
3603
3604	    if (GET_MODE_CLASS (mode) == MODE_INT
3605		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
3606		&& offset % UNITS_PER_WORD == 0
3607		&& (new = operand_subword (constant,
3608					   offset / UNITS_PER_WORD,
3609					   0, const_mode)) != 0)
3610	      return new;
3611
3612	    if (((BYTES_BIG_ENDIAN
3613		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3614		 || (! BYTES_BIG_ENDIAN && offset == 0))
3615		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
3616	      return new;
3617	  }
3618
3619	/* If this is a reference to a label at a known position in a jump
3620	   table, we also know its value.  */
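	/* E.g., a load of word 2 of an ADDR_VEC dispatch table is known to
	   be the third (label_ref ...) in that vector; illustratively,
	   (mem (plus (label_ref L) (const_int 8))) on a 32-bit target.  */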
3621	if (base && GET_CODE (base) == LABEL_REF)
3622	  {
3623	    rtx label = XEXP (base, 0);
3624	    rtx table_insn = NEXT_INSN (label);
3625
3626	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3627		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3628	      {
3629		rtx table = PATTERN (table_insn);
3630
3631		if (offset >= 0
3632		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3633			< XVECLEN (table, 0)))
3634		  return XVECEXP (table, 0,
3635				  offset / GET_MODE_SIZE (GET_MODE (table)));
3636	      }
3637	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3638		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3639	      {
3640		rtx table = PATTERN (table_insn);
3641
3642		if (offset >= 0
3643		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3644			< XVECLEN (table, 1)))
3645		  {
3646		    offset /= GET_MODE_SIZE (GET_MODE (table));
3647		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3648					 XEXP (table, 0));
3649
3650		    if (GET_MODE (table) != Pmode)
3651		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3652
3653		    /* Indicate this is a constant.  This isn't a
3654		       valid form of CONST, but it will only be used
3655		       to fold the next insns and then discarded, so
3656		       it should be safe.
3657
3658		       Note this expression must be explicitly discarded,
3659		       by cse_insn, else it may end up in a REG_EQUAL note
3660		       and "escape" to cause problems elsewhere.  */
3661		    return gen_rtx_CONST (GET_MODE (new), new);
3662		  }
3663	      }
3664	  }
3665
3666	return x;
3667      }
3668
3669#ifdef NO_FUNCTION_CSE
3670    case CALL:
3671      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3672	return x;
3673      break;
3674#endif
3675
3676    case ASM_OPERANDS:
3677      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3678	validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3679			 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3680      break;
3681
3682    default:
3683      break;
3684    }
3685
3686  const_arg0 = 0;
3687  const_arg1 = 0;
3688  const_arg2 = 0;
3689  mode_arg0 = VOIDmode;
3690
3691  /* Try folding our operands.
3692     Then see which ones have constant values known.  */
3693
3694  fmt = GET_RTX_FORMAT (code);
3695  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3696    if (fmt[i] == 'e')
3697      {
3698	rtx arg = XEXP (x, i);
3699	rtx folded_arg = arg, const_arg = 0;
3700	enum machine_mode mode_arg = GET_MODE (arg);
3701	rtx cheap_arg, expensive_arg;
3702	rtx replacements[2];
3703	int j;
3704
3705	/* Most arguments are cheap, so handle them specially.  */
3706	switch (GET_CODE (arg))
3707	  {
3708	  case REG:
3709	    /* This is the same as calling equiv_constant; it is duplicated
3710	       here for speed.  */
3711	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3712	      {
3713		int arg_q = REG_QTY (REGNO (arg));
3714		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3715
3716		if (arg_ent->const_rtx != NULL_RTX
3717		    && GET_CODE (arg_ent->const_rtx) != REG
3718		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3719		  const_arg
3720		    = gen_lowpart_if_possible (GET_MODE (arg),
3721					       arg_ent->const_rtx);
3722	      }
3723	    break;
3724
3725	  case CONST:
3726	  case CONST_INT:
3727	  case SYMBOL_REF:
3728	  case LABEL_REF:
3729	  case CONST_DOUBLE:
3730	    const_arg = arg;
3731	    break;
3732
3733#ifdef HAVE_cc0
3734	  case CC0:
3735	    folded_arg = prev_insn_cc0;
3736	    mode_arg = prev_insn_cc0_mode;
3737	    const_arg = equiv_constant (folded_arg);
3738	    break;
3739#endif
3740
3741	  default:
3742	    folded_arg = fold_rtx (arg, insn);
3743	    const_arg = equiv_constant (folded_arg);
3744	  }
3745
3746	/* For the first three operands, see if the operand
3747	   is constant or equivalent to a constant.  */
3748	switch (i)
3749	  {
3750	  case 0:
3751	    folded_arg0 = folded_arg;
3752	    const_arg0 = const_arg;
3753	    mode_arg0 = mode_arg;
3754	    break;
3755	  case 1:
3756	    folded_arg1 = folded_arg;
3757	    const_arg1 = const_arg;
3758	    break;
3759	  case 2:
3760	    const_arg2 = const_arg;
3761	    break;
3762	  }
3763
3764	/* Pick the least expensive of the folded argument and an
3765	   equivalent constant argument.  */
3766	if (const_arg == 0 || const_arg == folded_arg
3767	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3768	  cheap_arg = folded_arg, expensive_arg = const_arg;
3769	else
3770	  cheap_arg = const_arg, expensive_arg = folded_arg;
3771
3772	/* Try to replace the operand with the cheapest of the two
3773	   possibilities.  If it doesn't work and this is either of the first
3774	   two operands of a commutative operation, try swapping them.
3775	   If THAT fails, try the more expensive, provided it is cheaper
3776	   than what is already there.  */
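
	/* For instance (made-up costs): if the existing operand has cost 5,
	   the folded form cost 3, and the constant equivalent cost 2, we
	   try the constant first, then the folded form, and we stop as soon
	   as a candidate would cost more than what is already in place
	   (preferring constants on ties).  */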
3777
3778	if (cheap_arg == XEXP (x, i))
3779	  continue;
3780
3781	if (insn == 0 && ! copied)
3782	  {
3783	    x = copy_rtx (x);
3784	    copied = 1;
3785	  }
3786
3787	/* Order the replacements from cheapest to most expensive.  */
3788	replacements[0] = cheap_arg;
3789	replacements[1] = expensive_arg;
3790
3791	for (j = 0; j < 2 && replacements[j];  j++)
3792	  {
3793	    int old_cost = COST_IN (XEXP (x, i), code);
3794	    int new_cost = COST_IN (replacements[j], code);
3795
3796	    /* Stop if what existed before was cheaper.  Prefer constants
3797	       in the case of a tie.  */
3798	    if (new_cost > old_cost
3799		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3800	      break;
3801
3802	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3803	      break;
3804
3805	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3806		|| code == LTGT || code == UNEQ || code == ORDERED
3807		|| code == UNORDERED)
3808	      {
3809		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3810		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3811
3812		if (apply_change_group ())
3813		  {
3814		    /* Swap them back to their original (invalid) order so that
3815		       this loop can continue; MUST_SWAP flags them for swapping later.  */
3816		    rtx tem;
3817
3818		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3819		    XEXP (x, 1) = tem;
3820		    must_swap = 1;
3821		    break;
3822		  }
3823	      }
3824	  }
3825      }
3826
3827    else
3828      {
3829	if (fmt[i] == 'E')
3830	  /* Don't try to fold inside of a vector of expressions.
3831	     Doing nothing is harmless.  */
3832	  {;}
3833      }
3834
3835  /* For a commutative operation, place a constant integer as the second
3836     operand unless the first operand is also a constant integer.  Otherwise,
3837     place any constant second unless the first operand is also a constant.  */
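
  /* E.g., (plus:SI (const_int 4) (reg 65)) is rewritten here as
     (plus:SI (reg 65) (const_int 4)); this is an illustration, assuming
     the change validates against the insn.  */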
3838
3839  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3840      || code == LTGT || code == UNEQ || code == ORDERED
3841      || code == UNORDERED)
3842    {
3843      if (must_swap || (const_arg0
3844	  		&& (const_arg1 == 0
3845	      		    || (GET_CODE (const_arg0) == CONST_INT
3846			        && GET_CODE (const_arg1) != CONST_INT))))
3847	{
3848	  rtx tem = XEXP (x, 0);
3849
3850	  if (insn == 0 && ! copied)
3851	    {
3852	      x = copy_rtx (x);
3853	      copied = 1;
3854	    }
3855
3856	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3857	  validate_change (insn, &XEXP (x, 1), tem, 1);
3858	  if (apply_change_group ())
3859	    {
3860	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3861	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3862	    }
3863	}
3864    }
3865
3866  /* If X is an arithmetic operation, see if we can simplify it.  */
3867
3868  switch (GET_RTX_CLASS (code))
3869    {
3870    case '1':
3871      {
3872	int is_const = 0;
3873
3874	/* We can't simplify extension ops unless we know the
3875	   original mode.  */
3876	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3877	    && mode_arg0 == VOIDmode)
3878	  break;
3879
3880	/* If we had a CONST, strip it off and put it back later if we
3881	   fold.  */
3882	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3883	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3884
3885	new = simplify_unary_operation (code, mode,
3886					const_arg0 ? const_arg0 : folded_arg0,
3887					mode_arg0);
3888	if (new != 0 && is_const)
3889	  new = gen_rtx_CONST (mode, new);
3890      }
3891      break;
3892
3893    case '<':
3894      /* See what items are actually being compared and set FOLDED_ARG[01]
3895	 to those values and CODE to the actual comparison code.  If any are
3896	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3897	 do anything if both operands are already known to be constant.  */
3898
3899      if (const_arg0 == 0 || const_arg1 == 0)
3900	{
3901	  struct table_elt *p0, *p1;
3902	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3903	  enum machine_mode mode_arg1;
3904
3905#ifdef FLOAT_STORE_FLAG_VALUE
3906	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3907	    {
3908	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3909		      (FLOAT_STORE_FLAG_VALUE (mode), mode));
3910	      false_rtx = CONST0_RTX (mode);
3911	    }
3912#endif
3913
3914	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3915				       &mode_arg0, &mode_arg1);
3916	  const_arg0 = equiv_constant (folded_arg0);
3917	  const_arg1 = equiv_constant (folded_arg1);
3918
3919	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3920	     what kinds of things are being compared, so we can't do
3921	     anything with this comparison.  */
3922
3923	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3924	    break;
3925
3926	  /* If we do not now have two constants being compared, see
3927	     if we can nevertheless deduce some things about the
3928	     comparison.  */
3929	  if (const_arg0 == 0 || const_arg1 == 0)
3930	    {
3931	      /* Is FOLDED_ARG0 frame-pointer plus a constant?  Or a
3932		 non-explicit constant?  These aren't zero, but we
3933		 don't know their sign.  */
3934	      if (const_arg1 == const0_rtx
3935		  && (NONZERO_BASE_PLUS_P (folded_arg0)
3936#if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3937	  come out as 0.  */
3938		      || GET_CODE (folded_arg0) == SYMBOL_REF
3939#endif
3940		      || GET_CODE (folded_arg0) == LABEL_REF
3941		      || GET_CODE (folded_arg0) == CONST))
3942		{
3943		  if (code == EQ)
3944		    return false_rtx;
3945		  else if (code == NE)
3946		    return true_rtx;
3947		}
3948
3949	      /* See if the two operands are the same.  */
3950
3951	      if (folded_arg0 == folded_arg1
3952		  || (GET_CODE (folded_arg0) == REG
3953		      && GET_CODE (folded_arg1) == REG
3954		      && (REG_QTY (REGNO (folded_arg0))
3955			  == REG_QTY (REGNO (folded_arg1))))
3956		  || ((p0 = lookup (folded_arg0,
3957				    (safe_hash (folded_arg0, mode_arg0)
3958				     & HASH_MASK), mode_arg0))
3959		      && (p1 = lookup (folded_arg1,
3960				       (safe_hash (folded_arg1, mode_arg0)
3961					& HASH_MASK), mode_arg0))
3962		      && p0->first_same_value == p1->first_same_value))
3963		{
3964		   /* Sadly two equal NaNs are not equivalent.  */
3965		   if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3966		       || ! FLOAT_MODE_P (mode_arg0)
3967		       || flag_unsafe_math_optimizations)
3968		      return ((code == EQ || code == LE || code == GE
3969			       || code == LEU || code == GEU || code == UNEQ
3970			       || code == UNLE || code == UNGE || code == ORDERED)
3971			      ? true_rtx : false_rtx);
3972		   /* Take care of the FP compares we can resolve.  */
3973		   if (code == UNEQ || code == UNLE || code == UNGE)
3974		     return true_rtx;
3975		   if (code == LTGT || code == LT || code == GT)
3976		     return false_rtx;
3977		}
3978
3979	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3980		 doing now is either the same as we did before or the reverse
3981		 (we only check the reverse if not floating-point).  */
3982	      else if (GET_CODE (folded_arg0) == REG)
3983		{
3984		  int qty = REG_QTY (REGNO (folded_arg0));
3985
3986		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3987		    {
3988		      struct qty_table_elem *ent = &qty_table[qty];
3989
3990		      if ((comparison_dominates_p (ent->comparison_code, code)
3991			   || (! FLOAT_MODE_P (mode_arg0)
3992			       && comparison_dominates_p (ent->comparison_code,
3993						          reverse_condition (code))))
3994			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3995			      || (const_arg1
3996				  && rtx_equal_p (ent->comparison_const,
3997						  const_arg1))
3998			      || (GET_CODE (folded_arg1) == REG
3999				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4000			return (comparison_dominates_p (ent->comparison_code, code)
4001				? true_rtx : false_rtx);
4002		    }
4003		}
4004	    }
4005	}
4006
4007      /* If we are comparing against zero, see if the first operand is
4008	 equivalent to an IOR with a constant.  If so, we may be able to
4009	 determine the result of this comparison.  */
4010
4011      if (const_arg1 == const0_rtx)
4012	{
4013	  rtx y = lookup_as_function (folded_arg0, IOR);
4014	  rtx inner_const;
4015
4016	  if (y != 0
4017	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4018	      && GET_CODE (inner_const) == CONST_INT
4019	      && INTVAL (inner_const) != 0)
4020	    {
4021	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4022	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4023			      && (INTVAL (inner_const)
4024				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4025	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4026
4027#ifdef FLOAT_STORE_FLAG_VALUE
4028	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4029		{
4030		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4031			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4032		  false_rtx = CONST0_RTX (mode);
4033		}
4034#endif
4035
4036	      switch (code)
4037		{
4038		case EQ:
4039		  return false_rtx;
4040		case NE:
4041		  return true_rtx;
4042		case LT:  case LE:
4043		  if (has_sign)
4044		    return true_rtx;
4045		  break;
4046		case GT:  case GE:
4047		  if (has_sign)
4048		    return false_rtx;
4049		  break;
4050		default:
4051		  break;
4052		}
4053	    }
4054	}
4055
4056      new = simplify_relational_operation (code,
4057					   (mode_arg0 != VOIDmode
4058					    ? mode_arg0
4059					    : (GET_MODE (const_arg0
4060							 ? const_arg0
4061							 : folded_arg0)
4062					       != VOIDmode)
4063					    ? GET_MODE (const_arg0
4064							? const_arg0
4065							: folded_arg0)
4066					    : GET_MODE (const_arg1
4067							? const_arg1
4068							: folded_arg1)),
4069					   const_arg0 ? const_arg0 : folded_arg0,
4070					   const_arg1 ? const_arg1 : folded_arg1);
4071#ifdef FLOAT_STORE_FLAG_VALUE
4072      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4073	{
4074	  if (new == const0_rtx)
4075	    new = CONST0_RTX (mode);
4076	  else
4077	    new = (CONST_DOUBLE_FROM_REAL_VALUE
4078		   (FLOAT_STORE_FLAG_VALUE (mode), mode));
4079	}
4080#endif
4081      break;
4082
4083    case '2':
4084    case 'c':
4085      switch (code)
4086	{
4087	case PLUS:
4088	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4089	     with that LABEL_REF as its second operand.  If so, the result is
4090	     the first operand of that MINUS.  This handles switches with an
4091	     ADDR_DIFF_VEC table.  */
4092	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4093	    {
4094	      rtx y
4095		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4096		: lookup_as_function (folded_arg0, MINUS);
4097
4098	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4099		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4100		return XEXP (y, 0);
4101
4102	      /* Now try for a CONST of a MINUS like the above.  */
4103	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4104			: lookup_as_function (folded_arg0, CONST))) != 0
4105		  && GET_CODE (XEXP (y, 0)) == MINUS
4106		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4107		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4108		return XEXP (XEXP (y, 0), 0);
4109	    }
4110
4111	  /* Likewise if the operands are in the other order.  */
4112	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4113	    {
4114	      rtx y
4115		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4116		: lookup_as_function (folded_arg1, MINUS);
4117
4118	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4119		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4120		return XEXP (y, 0);
4121
4122	      /* Now try for a CONST of a MINUS like the above.  */
4123	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4124			: lookup_as_function (folded_arg1, CONST))) != 0
4125		  && GET_CODE (XEXP (y, 0)) == MINUS
4126		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4127		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4128		return XEXP (XEXP (y, 0), 0);
4129	    }
4130
4131	  /* If second operand is a register equivalent to a negative
4132	     CONST_INT, see if we can find a register equivalent to the
4133	     positive constant.  Make a MINUS if so.  Don't do this for
4134	     a non-negative constant since we might then alternate between
4135	     choosing positive and negative constants.  Having the positive
4136	     constant previously-used is the more common case.  Be sure
4137	     the resulting constant is non-negative; if const_arg1 were
4138	     the smallest negative number this would overflow: depending
4139	     on the mode, this would either just be the same value (and
4140	     hence not save anything) or be incorrect.  */
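	  /* Illustration (hypothetical registers): for
	     (plus:SI (reg 65) (reg 66)) where (reg 66) is equivalent to
	     (const_int -4), if some register is known to hold (const_int 4)
	     we can instead generate (minus:SI (reg 65) <that register>).  */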
4141	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4142	      && INTVAL (const_arg1) < 0
4143	      /* This used to test
4144
4145	         -INTVAL (const_arg1) >= 0
4146
4147		 But the Sun V5.0 compilers mis-compiled that test.  So
4148		 instead we test for the problematic value in a more direct
4149		 manner and hope the Sun compilers get it correct.  */
4150	      && INTVAL (const_arg1) !=
4151	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4152	      && GET_CODE (folded_arg1) == REG)
4153	    {
4154	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4155	      struct table_elt *p
4156		= lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4157			  mode);
4158
4159	      if (p)
4160		for (p = p->first_same_value; p; p = p->next_same_value)
4161		  if (GET_CODE (p->exp) == REG)
4162		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4163						canon_reg (p->exp, NULL_RTX));
4164	    }
4165	  goto from_plus;
4166
4167	case MINUS:
4168	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4169	     If so, produce (PLUS Z C2-C).  */
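	  /* E.g., if Y is known to be (plus:SI (reg Z) (const_int 7)) and
	     C is (const_int 3), we produce (plus:SI (reg Z) (const_int 4));
	     the register names here are purely illustrative.  */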
4170	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4171	    {
4172	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4173	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4174		return fold_rtx (plus_constant (copy_rtx (y),
4175						-INTVAL (const_arg1)),
4176				 NULL_RTX);
4177	    }
4178
4179	  /* Fall through.  */
4180
4181	from_plus:
4182	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4183	case IOR:     case AND:       case XOR:
4184	case MULT:    case DIV:       case UDIV:
4185	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4186	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4187	     is known to be of similar form, we may be able to replace the
4188	     operation with a combined operation.  This may eliminate the
4189	     intermediate operation if every use is simplified in this way.
4190	     Note that the similar optimization done by combine.c only works
4191	     if the intermediate operation's result has only one reference.  */
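
	  /* A sketch (hypothetical registers): if we see
	     (plus:SI (reg 65) (const_int 4)) and (reg 65) is known to be
	     (plus:SI (reg 66) (const_int 3)), the constants associate to
	     give (plus:SI (reg 66) (const_int 7)).  Likewise two shifts of
	     the same kind combine into one whose count is the sum, guarded
	     below against exceeding the mode's width.  */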
4192
4193	  if (GET_CODE (folded_arg0) == REG
4194	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4195	    {
4196	      int is_shift
4197		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4198	      rtx y = lookup_as_function (folded_arg0, code);
4199	      rtx inner_const;
4200	      enum rtx_code associate_code;
4201	      rtx new_const;
4202
4203	      if (y == 0
4204		  || 0 == (inner_const
4205			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4206		  || GET_CODE (inner_const) != CONST_INT
4207		  /* If we have compiled a statement like
4208		     "if (x == (x & mask1))", and now are looking at
4209		     "x & mask2", we will have a case where the first operand
4210		     of Y is the same as our first operand.  Unless we detect
4211		     this case, an infinite loop will result.  */
4212		  || XEXP (y, 0) == folded_arg0)
4213		break;
4214
4215	      /* Don't associate these operations if they are a PLUS with the
4216		 same constant and it is a power of two.  These might be doable
4217		 with a pre- or post-increment.  Similarly for two subtracts of
4218		 identical powers of two with post-decrement.  */
4219
4220	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4221		  && ((HAVE_PRE_INCREMENT
4222			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4223		      || (HAVE_POST_INCREMENT
4224			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4225		      || (HAVE_PRE_DECREMENT
4226			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4227		      || (HAVE_POST_DECREMENT
4228			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4229		break;
4230
4231	      /* Compute the code used to compose the constants.  For example,
4232		 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT.  */
4233
4234	      associate_code
4235		= (code == MULT || code == DIV || code == UDIV ? MULT
4236		   : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4237
4238	      new_const = simplify_binary_operation (associate_code, mode,
4239						     const_arg1, inner_const);
4240
4241	      if (new_const == 0)
4242		break;
4243
4244	      /* If we are associating shift operations, don't let this
4245		 produce a shift of the size of the object or larger.
4246		 This could occur when we follow a sign-extend by a right
4247		 shift on a machine that does a sign-extend as a pair
4248		 of shifts.  */
4249
4250	      if (is_shift && GET_CODE (new_const) == CONST_INT
4251		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4252		{
4253		  /* As an exception, we can turn an ASHIFTRT of this
4254		     form into a shift of the number of bits - 1.  */
4255		  if (code == ASHIFTRT)
4256		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4257		  else
4258		    break;
4259		}
4260
4261	      y = copy_rtx (XEXP (y, 0));
4262
4263	      /* If Y contains our first operand (the most common way this
4264		 can happen is if Y is a MEM), we would go into an infinite
4265		 loop if we tried to fold it.  So don't in that case.  */
4266
4267	      if (! reg_mentioned_p (folded_arg0, y))
4268		y = fold_rtx (y, insn);
4269
4270	      return simplify_gen_binary (code, mode, y, new_const);
4271	    }
4272	  break;
4273
4274	default:
4275	  break;
4276	}
4277
4278      new = simplify_binary_operation (code, mode,
4279				       const_arg0 ? const_arg0 : folded_arg0,
4280				       const_arg1 ? const_arg1 : folded_arg1);
4281      break;
4282
4283    case 'o':
4284      /* (lo_sum (high X) X) is simply X.  */
4285      if (code == LO_SUM && const_arg0 != 0
4286	  && GET_CODE (const_arg0) == HIGH
4287	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4288	return const_arg1;
4289      break;
4290
4291    case '3':
4292    case 'b':
4293      new = simplify_ternary_operation (code, mode, mode_arg0,
4294					const_arg0 ? const_arg0 : folded_arg0,
4295					const_arg1 ? const_arg1 : folded_arg1,
4296					const_arg2 ? const_arg2 : XEXP (x, 2));
4297      break;
4298
4299    case 'x':
4300      /* Always eliminate CONSTANT_P_RTX at this stage.  */
4301      if (code == CONSTANT_P_RTX)
4302	return (const_arg0 ? const1_rtx : const0_rtx);
4303      break;
4304    }
4305
4306  return new ? new : x;
4307}
4308
4309/* Return a constant value currently equivalent to X.
4310   Return 0 if we don't know one.  */
4311
4312static rtx
4313equiv_constant (x)
4314     rtx x;
4315{
4316  if (GET_CODE (x) == REG
4317      && REGNO_QTY_VALID_P (REGNO (x)))
4318    {
4319      int x_q = REG_QTY (REGNO (x));
4320      struct qty_table_elem *x_ent = &qty_table[x_q];
4321
4322      if (x_ent->const_rtx)
4323	x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4324    }
4325
4326  if (x == 0 || CONSTANT_P (x))
4327    return x;
4328
4329  /* If X is a MEM, try to fold it outside the context of any insn to see if
4330     it might be equivalent to a constant.  That handles the case where it
4331     is a constant-pool reference.  Then try to look it up in the hash table
4332     in case it is something whose value we have seen before.  */
4333
4334  if (GET_CODE (x) == MEM)
4335    {
4336      struct table_elt *elt;
4337
4338      x = fold_rtx (x, NULL_RTX);
4339      if (CONSTANT_P (x))
4340	return x;
4341
4342      elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4343      if (elt == 0)
4344	return 0;
4345
4346      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4347	if (elt->is_const && CONSTANT_P (elt->exp))
4348	  return elt->exp;
4349    }
4350
4351  return 0;
4352}
4353
4354/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4355   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4356   least-significant part of X.
4357   MODE specifies how big a part of X to return.
4358
4359   If the requested operation cannot be done, 0 is returned.
4360
4361   This is similar to gen_lowpart in emit-rtl.c.  */
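
/* For instance, asking for the QImode low part of a (mem:SI ...) yields a
   (mem:QI ...) at a byte offset chosen for the target's endianness (offset
   0 on a little-endian machine); 0 is returned if the adjusted address is
   not valid for QImode.  */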
4362
4363rtx
4364gen_lowpart_if_possible (mode, x)
4365     enum machine_mode mode;
4366     rtx x;
4367{
4368  rtx result = gen_lowpart_common (mode, x);
4369
4370  if (result)
4371    return result;
4372  else if (GET_CODE (x) == MEM)
4373    {
4374      /* This is the only other case we handle.  */
4375      int offset = 0;
4376      rtx new;
4377
4378      if (WORDS_BIG_ENDIAN)
4379	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4380		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4381      if (BYTES_BIG_ENDIAN)
4382	/* Adjust the address so that the address-after-the-data is
4383	   unchanged.  */
4384	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4385		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4386
4387      new = adjust_address_nv (x, mode, offset);
4388      if (! memory_address_p (mode, XEXP (new, 0)))
4389	return 0;
4390
4391      return new;
4392    }
4393  else
4394    return 0;
4395}
4396
4397/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4398   branch.  It will be zero if not.
4399
4400   In certain cases, this can cause us to add an equivalence.  For example,
4401   if we are following the taken case of
4402   	if (i == 2)
4403   we can add the fact that `i' and `2' are now equivalent.
4404
4405   In any case, we can record that this comparison was passed.  If the same
4406   comparison is seen later, we will know its value.  */
4407
4408static void
4409record_jump_equiv (insn, taken)
4410     rtx insn;
4411     int taken;
4412{
4413  int cond_known_true;
4414  rtx op0, op1;
4415  rtx set;
4416  enum machine_mode mode, mode0, mode1;
4417  int reversed_nonequality = 0;
4418  enum rtx_code code;
4419
4420  /* Ensure this is the right kind of insn.  */
4421  if (! any_condjump_p (insn))
4422    return;
4423  set = pc_set (insn);
4424
4425  /* See if this jump condition is known true or false.  */
4426  if (taken)
4427    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4428  else
4429    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4430
4431  /* Get the type of comparison being done and the operands being compared.
4432     If we had to reverse a non-equality condition, record that fact so we
4433     know that it isn't valid for floating-point.  */
4434  code = GET_CODE (XEXP (SET_SRC (set), 0));
4435  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4436  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4437
4438  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4439  if (! cond_known_true)
4440    {
4441      code = reversed_comparison_code_parts (code, op0, op1, insn);
4442
4443      /* Don't remember if we can't find the inverse.  */
4444      if (code == UNKNOWN)
4445	return;
4446    }
4447
4448  /* The mode is the mode of the non-constant.  */
4449  mode = mode0;
4450  if (mode1 != VOIDmode)
4451    mode = mode1;
4452
4453  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4454}
4455
4456/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4457   REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
4458   Make any useful entries we can with that information.  Called from
4459   above function and called recursively.  */
4460
4461static void
4462record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4463     enum rtx_code code;
4464     enum machine_mode mode;
4465     rtx op0, op1;
4466     int reversed_nonequality;
4467{
4468  unsigned op0_hash, op1_hash;
4469  int op0_in_memory, op1_in_memory;
4470  struct table_elt *op0_elt, *op1_elt;
4471
4472  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4473     we know that they are also equal in the smaller mode (this is also
4474     true for all smaller modes whether or not there is a SUBREG, but
4475     is not worth testing for with no SUBREG).  */
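
  /* E.g. (hypothetical registers): from the known equality
     (subreg:DI (reg:SI 65) 0) == (reg:DI 66) we may also record that
     (reg:SI 65) equals the SImode low part of (reg:DI 66).  */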
4476
4477  /* Note that GET_MODE (op0) may not equal MODE.  */
4478  if (code == EQ && GET_CODE (op0) == SUBREG
4479      && (GET_MODE_SIZE (GET_MODE (op0))
4480	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4481    {
4482      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4483      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4484
4485      record_jump_cond (code, mode, SUBREG_REG (op0),
4486			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4487			reversed_nonequality);
4488    }
4489
4490  if (code == EQ && GET_CODE (op1) == SUBREG
4491      && (GET_MODE_SIZE (GET_MODE (op1))
4492	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4493    {
4494      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4495      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4496
4497      record_jump_cond (code, mode, SUBREG_REG (op1),
4498			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4499			reversed_nonequality);
4500    }
4501
4502  /* Similarly, if this is an NE comparison, and either is a SUBREG
4503     making a smaller mode, we know the whole thing is also NE.  */
4504
4505  /* Note that GET_MODE (op0) may not equal MODE;
4506     if we test MODE instead, we can get an infinite recursion
4507     alternating between two modes each wider than MODE.  */
4508
4509  if (code == NE && GET_CODE (op0) == SUBREG
4510      && subreg_lowpart_p (op0)
4511      && (GET_MODE_SIZE (GET_MODE (op0))
4512	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4513    {
4514      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4515      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4516
4517      record_jump_cond (code, mode, SUBREG_REG (op0),
4518			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4519			reversed_nonequality);
4520    }
4521
4522  if (code == NE && GET_CODE (op1) == SUBREG
4523      && subreg_lowpart_p (op1)
4524      && (GET_MODE_SIZE (GET_MODE (op1))
4525	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4526    {
4527      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4528      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4529
4530      record_jump_cond (code, mode, SUBREG_REG (op1),
4531			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4532			reversed_nonequality);
4533    }
4534
4535  /* Hash both operands.  */
4536
4537  do_not_record = 0;
4538  hash_arg_in_memory = 0;
4539  op0_hash = HASH (op0, mode);
4540  op0_in_memory = hash_arg_in_memory;
4541
4542  if (do_not_record)
4543    return;
4544
4545  do_not_record = 0;
4546  hash_arg_in_memory = 0;
4547  op1_hash = HASH (op1, mode);
4548  op1_in_memory = hash_arg_in_memory;
4549
4550  if (do_not_record)
4551    return;
4552
4553  /* Look up both operands.  */
4554  op0_elt = lookup (op0, op0_hash, mode);
4555  op1_elt = lookup (op1, op1_hash, mode);
4556
4557  /* If both operands are already equivalent or if they are not in the
4558     table but are identical, do nothing.  */
4559  if ((op0_elt != 0 && op1_elt != 0
4560       && op0_elt->first_same_value == op1_elt->first_same_value)
4561      || op0 == op1 || rtx_equal_p (op0, op1))
4562    return;
4563
4564  /* If we aren't setting two things equal, all we can do is save this
4565     comparison.  Similarly if this is floating-point.  In the latter
4566     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4567     If we record the equality, we might inadvertently delete code
4568     whose intent was to change -0 to +0.  */
4569
4570  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4571    {
4572      struct qty_table_elem *ent;
4573      int qty;
4574
4575      /* If we reversed a floating-point comparison, if OP0 is not a
4576	 register, or if OP1 is neither a register nor a constant, we can't
4577	 do anything.  */
4578
4579      if (GET_CODE (op1) != REG)
4580	op1 = equiv_constant (op1);
4581
4582      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4583	  || GET_CODE (op0) != REG || op1 == 0)
4584	return;
4585
4586      /* Put OP0 in the hash table if it isn't already.  This gives it a
4587	 new quantity number.  */
4588      if (op0_elt == 0)
4589	{
4590	  if (insert_regs (op0, NULL, 0))
4591	    {
4592	      rehash_using_reg (op0);
4593	      op0_hash = HASH (op0, mode);
4594
4595	      /* If OP0 is contained in OP1, this changes its hash code
4596		 as well.  Faster to rehash than to check, except
4597		 for the simple case of a constant.  */
4598	      if (! CONSTANT_P (op1))
4599		op1_hash = HASH (op1, mode);
4600	    }
4601
4602	  op0_elt = insert (op0, NULL, op0_hash, mode);
4603	  op0_elt->in_memory = op0_in_memory;
4604	}
4605
4606      qty = REG_QTY (REGNO (op0));
4607      ent = &qty_table[qty];
4608
4609      ent->comparison_code = code;
4610      if (GET_CODE (op1) == REG)
4611	{
4612	  /* Look it up again--in case op0 and op1 are the same.  */
4613	  op1_elt = lookup (op1, op1_hash, mode);
4614
4615	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4616	  if (op1_elt == 0)
4617	    {
4618	      if (insert_regs (op1, NULL, 0))
4619		{
4620		  rehash_using_reg (op1);
4621		  op1_hash = HASH (op1, mode);
4622		}
4623
4624	      op1_elt = insert (op1, NULL, op1_hash, mode);
4625	      op1_elt->in_memory = op1_in_memory;
4626	    }
4627
4628	  ent->comparison_const = NULL_RTX;
4629	  ent->comparison_qty = REG_QTY (REGNO (op1));
4630	}
4631      else
4632	{
4633	  ent->comparison_const = op1;
4634	  ent->comparison_qty = -1;
4635	}
4636
4637      return;
4638    }
4639
4640  /* If either side is still missing an equivalence, make it now,
4641     then merge the equivalences.  */
4642
4643  if (op0_elt == 0)
4644    {
4645      if (insert_regs (op0, NULL, 0))
4646	{
4647	  rehash_using_reg (op0);
4648	  op0_hash = HASH (op0, mode);
4649	}
4650
4651      op0_elt = insert (op0, NULL, op0_hash, mode);
4652      op0_elt->in_memory = op0_in_memory;
4653    }
4654
4655  if (op1_elt == 0)
4656    {
4657      if (insert_regs (op1, NULL, 0))
4658	{
4659	  rehash_using_reg (op1);
4660	  op1_hash = HASH (op1, mode);
4661	}
4662
4663      op1_elt = insert (op1, NULL, op1_hash, mode);
4664      op1_elt->in_memory = op1_in_memory;
4665    }
4666
4667  merge_equiv_classes (op0_elt, op1_elt);
4668  last_jump_equiv_class = op0_elt;
4669}
4670
4671/* CSE processing for one instruction.
4672   First simplify sources and addresses of all assignments
4673   in the instruction, using previously-computed equivalent values.
4674   Then install the new sources and destinations in the table
4675   of available values.
4676
4677   If LIBCALL_INSN is nonzero, don't record any equivalence made in
4678   the insn.  It means that INSN is inside a libcall block.  In this
4679   case LIBCALL_INSN is the corresponding insn with a REG_LIBCALL note.  */
4680
4681/* Data on one SET contained in the instruction.  */
4682
4683struct set
4684{
4685  /* The SET rtx itself.  */
4686  rtx rtl;
4687  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4688  rtx src;
4689  /* The hash-table element for the SET_SRC of the SET.  */
4690  struct table_elt *src_elt;
4691  /* Hash value for the SET_SRC.  */
4692  unsigned src_hash;
4693  /* Hash value for the SET_DEST.  */
4694  unsigned dest_hash;
4695  /* The SET_DEST, with SUBREG, etc., stripped.  */
4696  rtx inner_dest;
4697  /* Nonzero if the SET_SRC is in memory.  */
4698  char src_in_memory;
4699  /* Nonzero if the SET_SRC contains something
4700     whose value cannot be predicted and understood.  */
4701  char src_volatile;
4702  /* Original machine mode, in case it becomes a CONST_INT.  */
4703  enum machine_mode mode;
4704  /* A constant equivalent for SET_SRC, if any.  */
4705  rtx src_const;
4706  /* Original SET_SRC value used for libcall notes.  */
4707  rtx orig_src;
4708  /* Hash value of constant equivalent for SET_SRC.  */
4709  unsigned src_const_hash;
4710  /* Table entry for constant equivalent for SET_SRC, if any.  */
4711  struct table_elt *src_const_elt;
4712};
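/* Illustrative example (hypothetical pseudos): for an insn
   (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4))), RTL is the
   SET itself, SRC is the PLUS, and MODE is SImode.  If reg 101 is known
   to hold (const_int 8), the folding code below may set SRC_CONST to
   (const_int 12).  */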
4713
4714static void
4715cse_insn (insn, libcall_insn)
4716     rtx insn;
4717     rtx libcall_insn;
4718{
4719  rtx x = PATTERN (insn);
4720  int i;
4721  rtx tem;
4722  int n_sets = 0;
4723
4724#ifdef HAVE_cc0
4725  /* Records what this insn does to set CC0.  */
4726  rtx this_insn_cc0 = 0;
4727  enum machine_mode this_insn_cc0_mode = VOIDmode;
4728#endif
4729
4730  rtx src_eqv = 0;
4731  struct table_elt *src_eqv_elt = 0;
4732  int src_eqv_volatile = 0;
4733  int src_eqv_in_memory = 0;
4734  unsigned src_eqv_hash = 0;
4735
4736  struct set *sets = (struct set *) 0;
4737
4738  this_insn = insn;
4739
4740  /* Find all the SETs and CLOBBERs in this instruction.
4741     Record all the SETs in the array `set' and count them.
4742     Also determine whether there is a CLOBBER that invalidates
4743     all memory references, or all references at varying addresses.  */
4744
4745  if (GET_CODE (insn) == CALL_INSN)
4746    {
4747      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4748	{
4749	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4750	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4751	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4752	}
4753    }
4754
4755  if (GET_CODE (x) == SET)
4756    {
4757      sets = (struct set *) alloca (sizeof (struct set));
4758      sets[0].rtl = x;
4759
4760      /* Ignore SETs that are unconditional jumps.
4761	 They never need cse processing, so this does not hurt.
4762	 The reason is not efficiency but rather
4763	 so that we can test at the end for instructions
4764	 that have been simplified to unconditional jumps
4765	 and not be misled by unchanged instructions
4766	 that were unconditional jumps to begin with.  */
4767      if (SET_DEST (x) == pc_rtx
4768	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4769	;
4770
4771      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4772	 The hard function value register is used only once, to copy to
4773	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4774	 Ensure we invalidate the destination register.  On the 80386 no
4775	 other code would invalidate it since it is a fixed_reg.
4776	 We need not check the return of apply_change_group; see canon_reg.  */
4777
4778      else if (GET_CODE (SET_SRC (x)) == CALL)
4779	{
4780	  canon_reg (SET_SRC (x), insn);
4781	  apply_change_group ();
4782	  fold_rtx (SET_SRC (x), insn);
4783	  invalidate (SET_DEST (x), VOIDmode);
4784	}
4785      else
4786	n_sets = 1;
4787    }
4788  else if (GET_CODE (x) == PARALLEL)
4789    {
4790      int lim = XVECLEN (x, 0);
4791
4792      sets = (struct set *) alloca (lim * sizeof (struct set));
4793
4794      /* Find all regs explicitly clobbered in this insn,
4795	 and ensure they are not replaced with any other regs
4796	 elsewhere in this insn.
4797	 When a reg that is clobbered is also used for input,
4798	 we should presume that that is for a reason,
4799	 and we should not substitute some other register
4800	 which is not supposed to be clobbered.
4801	 Therefore, this loop cannot be merged into the one below
4802	 because a CALL may precede a CLOBBER and refer to the
4803	 value clobbered.  We must not let a canonicalization do
4804	 anything in that case.  */
4805      for (i = 0; i < lim; i++)
4806	{
4807	  rtx y = XVECEXP (x, 0, i);
4808	  if (GET_CODE (y) == CLOBBER)
4809	    {
4810	      rtx clobbered = XEXP (y, 0);
4811
4812	      if (GET_CODE (clobbered) == REG
4813		  || GET_CODE (clobbered) == SUBREG)
4814		invalidate (clobbered, VOIDmode);
4815	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4816		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4817		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4818	    }
4819	}
4820
4821      for (i = 0; i < lim; i++)
4822	{
4823	  rtx y = XVECEXP (x, 0, i);
4824	  if (GET_CODE (y) == SET)
4825	    {
4826	      /* As above, we ignore unconditional jumps and call-insns and
4827		 ignore the result of apply_change_group.  */
4828	      if (GET_CODE (SET_SRC (y)) == CALL)
4829		{
4830		  canon_reg (SET_SRC (y), insn);
4831		  apply_change_group ();
4832		  fold_rtx (SET_SRC (y), insn);
4833		  invalidate (SET_DEST (y), VOIDmode);
4834		}
4835	      else if (SET_DEST (y) == pc_rtx
4836		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4837		;
4838	      else
4839		sets[n_sets++].rtl = y;
4840	    }
4841	  else if (GET_CODE (y) == CLOBBER)
4842	    {
4843	      /* If we clobber memory, canon the address.
4844		 This does nothing when a register is clobbered
4845		 because we have already invalidated the reg.  */
4846	      if (GET_CODE (XEXP (y, 0)) == MEM)
4847		canon_reg (XEXP (y, 0), NULL_RTX);
4848	    }
4849	  else if (GET_CODE (y) == USE
4850		   && ! (GET_CODE (XEXP (y, 0)) == REG
4851			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4852	    canon_reg (y, NULL_RTX);
4853	  else if (GET_CODE (y) == CALL)
4854	    {
4855	      /* The result of apply_change_group can be ignored; see
4856		 canon_reg.  */
4857	      canon_reg (y, insn);
4858	      apply_change_group ();
4859	      fold_rtx (y, insn);
4860	    }
4861	}
4862    }
4863  else if (GET_CODE (x) == CLOBBER)
4864    {
4865      if (GET_CODE (XEXP (x, 0)) == MEM)
4866	canon_reg (XEXP (x, 0), NULL_RTX);
4867    }
4868
4869  /* Canonicalize a USE of a pseudo register or memory location.  */
4870  else if (GET_CODE (x) == USE
4871	   && ! (GET_CODE (XEXP (x, 0)) == REG
4872		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4873    canon_reg (XEXP (x, 0), NULL_RTX);
4874  else if (GET_CODE (x) == CALL)
4875    {
4876      /* The result of apply_change_group can be ignored; see canon_reg.  */
4877      canon_reg (x, insn);
4878      apply_change_group ();
4879      fold_rtx (x, insn);
4880    }
4881
4882  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4883     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
4884     is handled specially for this case, and if it isn't set, then there will
4885     be no equivalence for the destination.  */
4886  if (n_sets == 1 && REG_NOTES (insn) != 0
4887      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4888      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4889	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4890    src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4891
4892  /* Canonicalize sources and addresses of destinations.
4893     We do this in a separate pass to avoid problems when a MATCH_DUP is
4894     present in the insn pattern.  In that case, we want to ensure that
4895     we don't break the duplicate nature of the pattern.  So we will replace
4896     both operands at the same time.  Otherwise, we would fail to find an
4897     equivalent substitution in the loop calling validate_change below.
4898
4899     We used to suppress canonicalization of DEST if it appears in SRC,
4900     but we don't do this any more.  */
4901
4902  for (i = 0; i < n_sets; i++)
4903    {
4904      rtx dest = SET_DEST (sets[i].rtl);
4905      rtx src = SET_SRC (sets[i].rtl);
4906      rtx new = canon_reg (src, insn);
4907      int insn_code;
4908
4909      sets[i].orig_src = src;
4910      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4911	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4912	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4913	  || (insn_code = recog_memoized (insn)) < 0
4914	  || insn_data[insn_code].n_dups > 0)
4915	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4916      else
4917	SET_SRC (sets[i].rtl) = new;
4918
4919      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4920	{
4921	  validate_change (insn, &XEXP (dest, 1),
4922			   canon_reg (XEXP (dest, 1), insn), 1);
4923	  validate_change (insn, &XEXP (dest, 2),
4924			   canon_reg (XEXP (dest, 2), insn), 1);
4925	}
4926
4927      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4928	     || GET_CODE (dest) == ZERO_EXTRACT
4929	     || GET_CODE (dest) == SIGN_EXTRACT)
4930	dest = XEXP (dest, 0);
4931
4932      if (GET_CODE (dest) == MEM)
4933	canon_reg (dest, insn);
4934    }
4935
4936  /* Now that we have done all the replacements, we can apply the change
4937     group and see if they all work.  Note that this will cause some
4938     canonicalizations that would have worked individually not to be applied
4939     because some other canonicalization didn't work, but this should not
4940     occur often.
4941
4942     The result of apply_change_group can be ignored; see canon_reg.  */
4943
4944  apply_change_group ();
4945
4946  /* Set sets[i].src_elt to the class each source belongs to.
4947     Detect assignments from or to volatile things
4948     and set sets[i].rtl to zero so they will be ignored
4949     in the rest of this function.
4950
4951     Nothing in this loop changes the hash table or the register chains.  */
4952
4953  for (i = 0; i < n_sets; i++)
4954    {
4955      rtx src, dest;
4956      rtx src_folded;
4957      struct table_elt *elt = 0, *p;
4958      enum machine_mode mode;
4959      rtx src_eqv_here;
4960      rtx src_const = 0;
4961      rtx src_related = 0;
4962      struct table_elt *src_const_elt = 0;
4963      int src_cost = MAX_COST;
4964      int src_eqv_cost = MAX_COST;
4965      int src_folded_cost = MAX_COST;
4966      int src_related_cost = MAX_COST;
4967      int src_elt_cost = MAX_COST;
4968      int src_regcost = MAX_COST;
4969      int src_eqv_regcost = MAX_COST;
4970      int src_folded_regcost = MAX_COST;
4971      int src_related_regcost = MAX_COST;
4972      int src_elt_regcost = MAX_COST;
4973      /* Set non-zero if we need to call force_const_mem on the
4974	 contents of src_folded before using it.  */
4975      int src_folded_force_flag = 0;
4976
4977      dest = SET_DEST (sets[i].rtl);
4978      src = SET_SRC (sets[i].rtl);
4979
4980      /* If SRC is a constant that has no machine mode,
4981	 hash it with the destination's machine mode.
4982	 This way we can keep different modes separate.  */
4983
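      /* For example, (const_int 5) carries no mode of its own; stored
	 into an SImode register it is hashed in SImode, so the same value
	 stored into a DImode register lands in a different hash class.  */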
4984      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4985      sets[i].mode = mode;
4986
4987      if (src_eqv)
4988	{
4989	  enum machine_mode eqvmode = mode;
4990	  if (GET_CODE (dest) == STRICT_LOW_PART)
4991	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4992	  do_not_record = 0;
4993	  hash_arg_in_memory = 0;
4994	  src_eqv = fold_rtx (src_eqv, insn);
4995	  src_eqv_hash = HASH (src_eqv, eqvmode);
4996
4997	  /* Find the equivalence class for the equivalent expression.  */
4998
4999	  if (!do_not_record)
5000	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5001
5002	  src_eqv_volatile = do_not_record;
5003	  src_eqv_in_memory = hash_arg_in_memory;
5004	}
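      /* SRC_EQV originates from a REG_EQUAL note: e.g., an insn
	 (set (reg:SI 100) (mult:SI ...)) carrying REG_EQUAL (const_int 12)
	 makes (const_int 12) available as an equivalent for the source.  */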
5005
5006      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5007	 value of the INNER register, not the destination.  So it is not
5008	 a valid substitution for the source.  But save it for later.  */
5009      if (GET_CODE (dest) == STRICT_LOW_PART)
5010	src_eqv_here = 0;
5011      else
5012	src_eqv_here = src_eqv;
5013
5014      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
5015	 simplified result, which may not necessarily be valid.  */
5016      src_folded = fold_rtx (src, insn);
5017
5018#if 0
5019      /* ??? This caused bad code to be generated for the m68k port with -O2.
5020	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5021	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5022	 At the end we will add src and src_const to the same equivalence
5023	 class.  We now have 3 and -1 on the same equivalence class.  This
5024	 causes later instructions to be mis-optimized.  */
5025      /* If storing a constant in a bitfield, pre-truncate the constant
5026	 so we will be able to record it later.  */
5027      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5028	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5029	{
5030	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5031
5032	  if (GET_CODE (src) == CONST_INT
5033	      && GET_CODE (width) == CONST_INT
5034	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5035	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5036	    src_folded
5037	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5038					  << INTVAL (width)) - 1));
5039	}
5040#endif
5041
5042      /* Compute SRC's hash code, and also notice if it
5043	 should not be recorded at all.  In that case,
5044	 prevent any further processing of this assignment.  */
5045      do_not_record = 0;
5046      hash_arg_in_memory = 0;
5047
5048      sets[i].src = src;
5049      sets[i].src_hash = HASH (src, mode);
5050      sets[i].src_volatile = do_not_record;
5051      sets[i].src_in_memory = hash_arg_in_memory;
5052
5053      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5054	 a pseudo, do not record SRC.  Using SRC as a replacement for
5055	 anything else will be incorrect in that situation.  Note that
5056	 this usually occurs only for stack slots, in which case all the
5057	 RTL would be referring to SRC, so we don't lose any optimization
5058	 opportunities by not having SRC in the hash table.  */
5059
5060      if (GET_CODE (src) == MEM
5061	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5062	  && GET_CODE (dest) == REG
5063	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5064	sets[i].src_volatile = 1;
5065
5066#if 0
5067      /* It is no longer clear why we used to do this, but it doesn't
5068	 appear to still be needed.  So let's try without it since this
5069	 code hurts cse'ing widened ops.  */
5070      /* If source is a perverse subreg (such as QI treated as an SI),
5071	 treat it as volatile.  It may do the work of an SI in one context
5072	 where the extra bits are not being used, but cannot replace an SI
5073	 in general.  */
5074      if (GET_CODE (src) == SUBREG
5075	  && (GET_MODE_SIZE (GET_MODE (src))
5076	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5077	sets[i].src_volatile = 1;
5078#endif
5079
5080      /* Locate all possible equivalent forms for SRC.  Try to replace
5081         SRC in the insn with each cheaper equivalent.
5082
5083         We have the following types of equivalents: SRC itself, a folded
5084         version, a value given in a REG_EQUAL note, or a value related
5085	 to a constant.
5086
5087         Each of these equivalents may be part of an additional class
5088         of equivalents (if more than one is in the table, they must be in
5089         the same class; we check for this).
5090
5091	 If the source is volatile, we don't do any table lookups.
5092
5093         We note any constant equivalent for possible later use in a
5094         REG_NOTE.  */
5095
5096      if (!sets[i].src_volatile)
5097	elt = lookup (src, sets[i].src_hash, mode);
5098
5099      sets[i].src_elt = elt;
5100
5101      if (elt && src_eqv_here && src_eqv_elt)
5102	{
5103	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5104	    {
5105	      /* The REG_EQUAL is indicating that two formerly distinct
5106		 classes are now equivalent.  So merge them.  */
5107	      merge_equiv_classes (elt, src_eqv_elt);
5108	      src_eqv_hash = HASH (src_eqv, elt->mode);
5109	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5110	    }
5111
5112	  src_eqv_here = 0;
5113	}
5114
5115      else if (src_eqv_elt)
5116	elt = src_eqv_elt;
5117
5118      /* Try to find a constant somewhere and record it in `src_const'.
5119	 Record its table element, if any, in `src_const_elt'.  Look in
5120	 any known equivalences first.  (If the constant is not in the
5121	 table, also set `sets[i].src_const_hash').  */
5122      if (elt)
5123	for (p = elt->first_same_value; p; p = p->next_same_value)
5124	  if (p->is_const)
5125	    {
5126	      src_const = p->exp;
5127	      src_const_elt = elt;
5128	      break;
5129	    }
5130
5131      if (src_const == 0
5132	  && (CONSTANT_P (src_folded)
5133	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5134		 "constant" here so we will record it. This allows us
5135		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5136	      || (GET_CODE (src_folded) == MINUS
5137		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5138		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5139	src_const = src_folded, src_const_elt = elt;
5140      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5141	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5142
5143      /* If we don't know if the constant is in the table, get its
5144	 hash code and look it up.  */
5145      if (src_const && src_const_elt == 0)
5146	{
5147	  sets[i].src_const_hash = HASH (src_const, mode);
5148	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5149	}
5150
5151      sets[i].src_const = src_const;
5152      sets[i].src_const_elt = src_const_elt;
5153
5154      /* If the constant and our source are both in the table, mark them as
5155	 equivalent.  Otherwise, if a constant is in the table but the source
5156	 isn't, set ELT to it.  */
5157      if (src_const_elt && elt
5158	  && src_const_elt->first_same_value != elt->first_same_value)
5159	merge_equiv_classes (elt, src_const_elt);
5160      else if (src_const_elt && elt == 0)
5161	elt = src_const_elt;
5162
5163      /* See if there is a register linearly related to a constant
5164         equivalent of SRC.  */
5165      if (src_const
5166	  && (GET_CODE (src_const) == CONST
5167	      || (src_const_elt && src_const_elt->related_value != 0)))
5168	{
5169	  src_related = use_related_value (src_const, src_const_elt);
5170	  if (src_related)
5171	    {
5172	      struct table_elt *src_related_elt
5173		= lookup (src_related, HASH (src_related, mode), mode);
5174	      if (src_related_elt && elt)
5175		{
5176		  if (elt->first_same_value
5177		      != src_related_elt->first_same_value)
5178		    /* This can occur when we previously saw a CONST
5179		       involving a SYMBOL_REF and then see the SYMBOL_REF
5180		       twice.  Merge the involved classes.  */
5181		    merge_equiv_classes (elt, src_related_elt);
5182
5183		  src_related = 0;
5184		  src_related_elt = 0;
5185		}
5186	      else if (src_related_elt && elt == 0)
5187		elt = src_related_elt;
5188	    }
5189	}
5190
5191      /* See if we have a CONST_INT that is already in a register in a
5192	 wider mode.  */
5193
5194      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5195	  && GET_MODE_CLASS (mode) == MODE_INT
5196	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5197	{
5198	  enum machine_mode wider_mode;
5199
5200	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5201	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5202	       && src_related == 0;
5203	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5204	    {
5205	      struct table_elt *const_elt
5206		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5207
5208	      if (const_elt == 0)
5209		continue;
5210
5211	      for (const_elt = const_elt->first_same_value;
5212		   const_elt; const_elt = const_elt->next_same_value)
5213		if (GET_CODE (const_elt->exp) == REG)
5214		  {
5215		    src_related = gen_lowpart_if_possible (mode,
5216							   const_elt->exp);
5217		    break;
5218		  }
5219	    }
5220	}
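      /* Illustration: if (const_int 3) was previously loaded into an
	 SImode register and we now want it in QImode, the loop above finds
	 the SImode copy and SRC_RELATED becomes the QImode lowpart of that
	 register, saving a fresh load of the constant.  */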
5221
5222      /* Another possibility is that we have an AND with a constant in
5223	 a mode narrower than a word.  If so, it might have been generated
5224	 as part of an "if" which would narrow the AND.  If we already
5225	 have done the AND in a wider mode, we can use a SUBREG of that
5226	 value.  */
5227
5228      if (flag_expensive_optimizations && ! src_related
5229	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5230	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5231	{
5232	  enum machine_mode tmode;
5233	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5234
5235	  for (tmode = GET_MODE_WIDER_MODE (mode);
5236	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5237	       tmode = GET_MODE_WIDER_MODE (tmode))
5238	    {
5239	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5240	      struct table_elt *larger_elt;
5241
5242	      if (inner)
5243		{
5244		  PUT_MODE (new_and, tmode);
5245		  XEXP (new_and, 0) = inner;
5246		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5247		  if (larger_elt == 0)
5248		    continue;
5249
5250		  for (larger_elt = larger_elt->first_same_value;
5251		       larger_elt; larger_elt = larger_elt->next_same_value)
5252		    if (GET_CODE (larger_elt->exp) == REG)
5253		      {
5254			src_related
5255			  = gen_lowpart_if_possible (mode, larger_elt->exp);
5256			break;
5257		      }
5258
5259		  if (src_related)
5260		    break;
5261		}
5262	    }
5263	}
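      /* Illustration: for (and:HI X (const_int 255)), if the same AND was
	 already computed in SImode into some register, the HImode lowpart
	 of that register can be reused here.  */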
5264
5265#ifdef LOAD_EXTEND_OP
5266      /* See if a MEM has already been loaded with a widening operation;
5267	 if it has, we can use a subreg of that.  Many CISC machines
5268	 also have such operations, but this is only likely to be
5269	 beneficial on these machines.  */
5270
5271      if (flag_expensive_optimizations && src_related == 0
5272	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5273	  && GET_MODE_CLASS (mode) == MODE_INT
5274	  && GET_CODE (src) == MEM && ! do_not_record
5275	  && LOAD_EXTEND_OP (mode) != NIL)
5276	{
5277	  enum machine_mode tmode;
5278
5279	  /* Set what we are trying to extend and the operation it might
5280	     have been extended with.  */
5281	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5282	  XEXP (memory_extend_rtx, 0) = src;
5283
5284	  for (tmode = GET_MODE_WIDER_MODE (mode);
5285	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5286	       tmode = GET_MODE_WIDER_MODE (tmode))
5287	    {
5288	      struct table_elt *larger_elt;
5289
5290	      PUT_MODE (memory_extend_rtx, tmode);
5291	      larger_elt = lookup (memory_extend_rtx,
5292				   HASH (memory_extend_rtx, tmode), tmode);
5293	      if (larger_elt == 0)
5294		continue;
5295
5296	      for (larger_elt = larger_elt->first_same_value;
5297		   larger_elt; larger_elt = larger_elt->next_same_value)
5298		if (GET_CODE (larger_elt->exp) == REG)
5299		  {
5300		    src_related = gen_lowpart_if_possible (mode,
5301							   larger_elt->exp);
5302		    break;
5303		  }
5304
5305	      if (src_related)
5306		break;
5307	    }
5308	}
5309#endif /* LOAD_EXTEND_OP */
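      /* Illustration: on a target where LOAD_EXTEND_OP (QImode) is
	 ZERO_EXTEND, a prior (zero_extend:SI (mem:QI addr)) held in a
	 register lets us take the QImode lowpart of that register instead
	 of loading from memory again.  */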
5310
5311      if (src == src_folded)
5312	src_folded = 0;
5313
5314      /* At this point, ELT, if non-zero, points to a class of expressions
5315         equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
5316	 and SRC_RELATED, if non-zero, each contain additional equivalent
5317	 expressions.  Prune these latter expressions by deleting expressions
5318	 already in the equivalence class.
5319
5320	 Check for an equivalent identical to the destination.  If found,
5321	 this is the preferred equivalent since it will likely lead to
5322	 elimination of the insn.  Indicate this by placing it in
5323	 `src_related'.  */
5324
5325      if (elt)
5326	elt = elt->first_same_value;
5327      for (p = elt; p; p = p->next_same_value)
5328	{
5329	  enum rtx_code code = GET_CODE (p->exp);
5330
5331	  /* If the expression is not valid, ignore it.  Then we do not
5332	     have to check for validity below.  In most cases, we can use
5333	     `rtx_equal_p', since canonicalization has already been done.  */
5334	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5335	    continue;
5336
5337	  /* Also skip paradoxical subregs, unless that's what we're
5338	     looking for.  */
5339	  if (code == SUBREG
5340	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5341		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5342	      && ! (src != 0
5343		    && GET_CODE (src) == SUBREG
5344		    && GET_MODE (src) == GET_MODE (p->exp)
5345		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5346			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5347	    continue;
5348
5349	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5350	    src = 0;
5351	  else if (src_folded && GET_CODE (src_folded) == code
5352		   && rtx_equal_p (src_folded, p->exp))
5353	    src_folded = 0;
5354	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5355		   && rtx_equal_p (src_eqv_here, p->exp))
5356	    src_eqv_here = 0;
5357	  else if (src_related && GET_CODE (src_related) == code
5358		   && rtx_equal_p (src_related, p->exp))
5359	    src_related = 0;
5360
5361	  /* If this is the same as the destination of the insn, we want
5362	     to prefer it.  Copy it to src_related.  The code below will
5363	     then give it a negative cost.  */
5364	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5365	    src_related = dest;
5366	}
5367
5368      /* Find the cheapest valid equivalent, trying all the available
5369         possibilities.  Prefer items not in the hash table to ones
5370         that are when they are equal cost.  Note that we can never
5371         worsen an insn as the current contents will also succeed.
5372	 If we find an equivalent identical to the destination, use it as best,
5373	 since this insn will probably be eliminated in that case.  */
5374      if (src)
5375	{
5376	  if (rtx_equal_p (src, dest))
5377	    src_cost = src_regcost = -1;
5378	  else
5379	    {
5380	      src_cost = COST (src);
5381	      src_regcost = approx_reg_cost (src);
5382	    }
5383	}
5384
5385      if (src_eqv_here)
5386	{
5387	  if (rtx_equal_p (src_eqv_here, dest))
5388	    src_eqv_cost = src_eqv_regcost = -1;
5389	  else
5390	    {
5391	      src_eqv_cost = COST (src_eqv_here);
5392	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5393	    }
5394	}
5395
5396      if (src_folded)
5397	{
5398	  if (rtx_equal_p (src_folded, dest))
5399	    src_folded_cost = src_folded_regcost = -1;
5400	  else
5401	    {
5402	      src_folded_cost = COST (src_folded);
5403	      src_folded_regcost = approx_reg_cost (src_folded);
5404	    }
5405	}
5406
5407      if (src_related)
5408	{
5409	  if (rtx_equal_p (src_related, dest))
5410	    src_related_cost = src_related_regcost = -1;
5411	  else
5412	    {
5413	      src_related_cost = COST (src_related);
5414	      src_related_regcost = approx_reg_cost (src_related);
5415	    }
5416	}
5417
5418      /* If this was an indirect jump insn, a known label will really be
5419	 cheaper even though it looks more expensive.  */
5420      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5421	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5422
5423      /* Terminate loop when replacement made.  This must terminate since
5424         the current contents will be tested and will always be valid.  */
5425      while (1)
5426	{
5427	  rtx trial;
5428
5429	  /* Skip invalid entries.  */
5430	  while (elt && GET_CODE (elt->exp) != REG
5431		 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5432	    elt = elt->next_same_value;
5433
5434	  /* A paradoxical subreg would be bad here: it'll be the right
5435	     size, but later may be adjusted so that the upper bits aren't
5436	     what we want.  So reject it.  */
5437	  if (elt != 0
5438	      && GET_CODE (elt->exp) == SUBREG
5439	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5440		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5441	      /* It is okay, though, if the rtx we're trying to match
5442		 will ignore any of the bits we can't predict.  */
5443	      && ! (src != 0
5444		    && GET_CODE (src) == SUBREG
5445		    && GET_MODE (src) == GET_MODE (elt->exp)
5446		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5447			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5448	    {
5449	      elt = elt->next_same_value;
5450	      continue;
5451	    }
5452
5453          if (elt)
5454	    {
5455	      src_elt_cost = elt->cost;
5456	      src_elt_regcost = elt->regcost;
5457	    }
5458
5459          /* Find cheapest and skip it for the next time.  For items
5460	     of equal cost, use this order:
5461	     src_folded, src, src_eqv, src_related and hash table entry.  */
5462	  if (src_folded
5463	      && preferrable (src_folded_cost, src_folded_regcost,
5464			      src_cost, src_regcost) <= 0
5465	      && preferrable (src_folded_cost, src_folded_regcost,
5466			      src_eqv_cost, src_eqv_regcost) <= 0
5467	      && preferrable (src_folded_cost, src_folded_regcost,
5468			      src_related_cost, src_related_regcost) <= 0
5469	      && preferrable (src_folded_cost, src_folded_regcost,
5470			      src_elt_cost, src_elt_regcost) <= 0)
5471	    {
5472	      trial = src_folded, src_folded_cost = MAX_COST;
5473	      if (src_folded_force_flag)
5474		trial = force_const_mem (mode, trial);
5475	    }
5476	  else if (src
5477		   && preferrable (src_cost, src_regcost,
5478				   src_eqv_cost, src_eqv_regcost) <= 0
5479		   && preferrable (src_cost, src_regcost,
5480				   src_related_cost, src_related_regcost) <= 0
5481		   && preferrable (src_cost, src_regcost,
5482				   src_elt_cost, src_elt_regcost) <= 0)
5483	    trial = src, src_cost = MAX_COST;
5484	  else if (src_eqv_here
5485		   && preferrable (src_eqv_cost, src_eqv_regcost,
5486				   src_related_cost, src_related_regcost) <= 0
5487		   && preferrable (src_eqv_cost, src_eqv_regcost,
5488				   src_elt_cost, src_elt_regcost) <= 0)
5489	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5490	  else if (src_related
5491		   && preferrable (src_related_cost, src_related_regcost,
5492				   src_elt_cost, src_elt_regcost) <= 0)
5493  	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5494	  else
5495	    {
5496	      trial = copy_rtx (elt->exp);
5497	      elt = elt->next_same_value;
5498	      src_elt_cost = MAX_COST;
5499	    }
5500
5501	  /* We don't normally have an insn matching (set (pc) (pc)), so
5502	     check for this separately here.  We will delete such an
5503	     insn below.
5504
5505	     For other cases such as a table jump or conditional jump
5506	     where we know the ultimate target, go ahead and replace the
5507	     operand.  While that may not make a valid insn, we will
5508	     reemit the jump below (and also insert any necessary
5509	     barriers).  */
5510	  if (n_sets == 1 && dest == pc_rtx
5511	      && (trial == pc_rtx
5512		  || (GET_CODE (trial) == LABEL_REF
5513		      && ! condjump_p (insn))))
5514	    {
5515	      SET_SRC (sets[i].rtl) = trial;
5516	      cse_jumps_altered = 1;
5517	      break;
5518	    }
5519
5520	  /* Look for a substitution that makes a valid insn.  */
5521	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5522	    {
5523	      /* If we just made a substitution inside a libcall, then we
5524		 need to make the same substitution in any notes attached
5525		 to the RETVAL insn.  */
5526	      if (libcall_insn
5527		  && (GET_CODE (sets[i].orig_src) == REG
5528		      || GET_CODE (sets[i].orig_src) == SUBREG
5529		      || GET_CODE (sets[i].orig_src) == MEM))
5530		replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5531			     canon_reg (SET_SRC (sets[i].rtl), insn));
5532
5533	      /* The result of apply_change_group can be ignored; see
5534		 canon_reg.  */
5535
5536	      validate_change (insn, &SET_SRC (sets[i].rtl),
5537			       canon_reg (SET_SRC (sets[i].rtl), insn),
5538			       1);
5539	      apply_change_group ();
5540	      break;
5541	    }
5542
5543	  /* If we previously found constant pool entries for
5544	     constants and this is a constant, try making a
5545	     pool entry.  Put it in src_folded unless we have already done
5546	     this since that is where it likely came from.  */
5547
5548	  else if (constant_pool_entries_cost
5549		   && CONSTANT_P (trial)
5550		   /* Reject cases that will abort in decode_rtx_const.
5551		      On the alpha when simplifying a switch, we get
5552		      (const (truncate (minus (label_ref) (label_ref)))).  */
5553		   && ! (GET_CODE (trial) == CONST
5554			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5555		   /* Likewise on IA-64, except without the truncate.  */
5556		   && ! (GET_CODE (trial) == CONST
5557			 && GET_CODE (XEXP (trial, 0)) == MINUS
5558			 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5559			 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5560		   && (src_folded == 0
5561		       || (GET_CODE (src_folded) != MEM
5562			   && ! src_folded_force_flag))
5563		   && GET_MODE_CLASS (mode) != MODE_CC
5564		   && mode != VOIDmode)
5565	    {
5566	      src_folded_force_flag = 1;
5567	      src_folded = trial;
5568	      src_folded_cost = constant_pool_entries_cost;
5569	    }
5570	}
5571
5572      src = SET_SRC (sets[i].rtl);
5573
5574      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5575	 However, there is an important exception:  If both are registers
5576	 that are not the head of their equivalence class, replace SET_SRC
5577	 with the head of the class.  If we do not do this, we will have
5578	 both registers live over a portion of the basic block.  This way,
5579	 their lifetimes will likely abut instead of overlapping.  */
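      /* E.g., a no-op copy (set (reg 101) (reg 101)) whose class head is
	 reg 100 is rewritten as (set (reg 101) (reg 100)), so the two
	 pseudos need not stay live together.  */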
5580      if (GET_CODE (dest) == REG
5581	  && REGNO_QTY_VALID_P (REGNO (dest)))
5582	{
5583	  int dest_q = REG_QTY (REGNO (dest));
5584	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5585
5586	  if (dest_ent->mode == GET_MODE (dest)
5587	      && dest_ent->first_reg != REGNO (dest)
5588	      && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5589	      /* Don't do this if the original insn had a hard reg as
5590		 SET_SRC or SET_DEST.  */
5591	      && (GET_CODE (sets[i].src) != REG
5592		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5593	      && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5594	    /* We can't call canon_reg here because it won't do anything if
5595	       SRC is a hard register.  */
5596	    {
5597	      int src_q = REG_QTY (REGNO (src));
5598	      struct qty_table_elem *src_ent = &qty_table[src_q];
5599	      int first = src_ent->first_reg;
5600	      rtx new_src
5601		= (first >= FIRST_PSEUDO_REGISTER
5602		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5603
5604	      /* We must use validate_change even for this, because this
5605		 might be a special no-op instruction, suitable only to
5606		 tag notes onto.  */
5607	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5608		{
5609		  src = new_src;
5610		  /* If we had a constant that is cheaper than what we are now
5611		     setting SRC to, use that constant.  We ignored it when we
5612		     thought we could make this into a no-op.  */
5613		  if (src_const && COST (src_const) < COST (src)
5614		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5615					  src_const, 0))
5616		    src = src_const;
5617		}
5618	    }
5619	}
5620
5621      /* If we made a change, recompute SRC values.  */
5622      if (src != sets[i].src)
5623	{
5624	  cse_altered = 1;
5625	  do_not_record = 0;
5626	  hash_arg_in_memory = 0;
5627	  sets[i].src = src;
5628	  sets[i].src_hash = HASH (src, mode);
5629	  sets[i].src_volatile = do_not_record;
5630	  sets[i].src_in_memory = hash_arg_in_memory;
5631	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5632	}
5633
5634      /* If this is a single SET, we are setting a register, and we have an
5635	 equivalent constant, we want to add a REG_NOTE.  We don't want
5636	 to write a REG_EQUAL note for a constant pseudo since verifying that
5637	 that pseudo hasn't been eliminated is a pain.  Such a note also
5638	 won't help anything.
5639
5640	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5641	 which can be created for a reference to a compile time computable
5642	 entry in a jump table.  */
5643
5644      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5645	  && GET_CODE (src_const) != REG
5646	  && ! (GET_CODE (src_const) == CONST
5647		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5648		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5649		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5650	{
5651	  /* Make sure that the rtx is not shared with any other insn.  */
5652	  src_const = copy_rtx (src_const);
5653
5654	  /* Record the actual constant value in a REG_EQUAL note, making
5655	     a new one if one does not already exist.  */
5656	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5657
5658          /* If storing a constant value in a register that
5659	     previously held the constant value 0,
5660	     record this fact with a REG_WAS_0 note on this insn.
5661
5662	     Note that the *register* is required to have previously held 0,
5663	     not just any register in the quantity, and we must point to the
5664	     insn that set that register to zero.
5665
5666	     Rather than track each register individually, we just see if
5667	     the last set for this quantity was for this register.  */
5668
5669	  if (REGNO_QTY_VALID_P (REGNO (dest)))
5670	    {
5671	      int dest_q = REG_QTY (REGNO (dest));
5672	      struct qty_table_elem *dest_ent = &qty_table[dest_q];
5673
5674	      if (dest_ent->const_rtx == const0_rtx)
5675		{
5676		  /* See if we previously had a REG_WAS_0 note.  */
5677		  rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5678		  rtx const_insn = dest_ent->const_insn;
5679
5680		  if ((tem = single_set (const_insn)) != 0
5681		      && rtx_equal_p (SET_DEST (tem), dest))
5682		    {
5683		      if (note)
5684			XEXP (note, 0) = const_insn;
5685		      else
5686			REG_NOTES (insn)
5687			  = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5688					       REG_NOTES (insn));
5689		    }
5690		}
5691	    }
5692	}
5693
5694      /* Now deal with the destination.  */
5695      do_not_record = 0;
5696
5697      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5698	 to the MEM or REG within it.  */
5699      while (GET_CODE (dest) == SIGN_EXTRACT
5700	     || GET_CODE (dest) == ZERO_EXTRACT
5701	     || GET_CODE (dest) == SUBREG
5702	     || GET_CODE (dest) == STRICT_LOW_PART)
5703	dest = XEXP (dest, 0);
5704
5705      sets[i].inner_dest = dest;
5706
5707      if (GET_CODE (dest) == MEM)
5708	{
5709#ifdef PUSH_ROUNDING
5710	  /* Stack pushes invalidate the stack pointer.  */
5711	  rtx addr = XEXP (dest, 0);
5712	  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5713	      && XEXP (addr, 0) == stack_pointer_rtx)
5714	    invalidate (stack_pointer_rtx, Pmode);
5715#endif
5716	  dest = fold_rtx (dest, insn);
5717	}
5718
5719      /* Compute the hash code of the destination now,
5720	 before the effects of this instruction are recorded,
5721	 since the register values used in the address computation
5722	 are those before this instruction.  */
5723      sets[i].dest_hash = HASH (dest, mode);
5724
5725      /* Don't enter a bit-field in the hash table
5726	 because the value in it after the store
5727	 may not equal what was stored, due to truncation.  */
5728
5729      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5730	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5731	{
5732	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5733
5734	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5735	      && GET_CODE (width) == CONST_INT
5736	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5737	      && ! (INTVAL (src_const)
5738		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5739	    /* Exception: if the value is constant,
5740	       and it won't be truncated, record it.  */
5741	    ;
5742	  else
5743	    {
5744	      /* This is chosen so that the destination will be invalidated
5745		 but no new value will be recorded.
5746		 We must invalidate because sometimes constant
5747		 values can be recorded for bitfields.  */
5748	      sets[i].src_elt = 0;
5749	      sets[i].src_volatile = 1;
5750	      src_eqv = 0;
5751	      src_eqv_elt = 0;
5752	    }
5753	}
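      /* E.g., storing (const_int 3) into a 2-bit ZERO_EXTRACT survives the
	 store intact and is safe to record, while (const_int 5) would be
	 truncated to 1, so in that case we only invalidate the destination.  */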
5754
5755      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5756	 the insn.  */
5757      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5758	{
5759	  /* One less use of the label this insn used to jump to.  */
5760	  delete_insn (insn);
5761	  cse_jumps_altered = 1;
5762	  /* No more processing for this set.  */
5763	  sets[i].rtl = 0;
5764	}
5765
5766      /* If this SET is now setting PC to a label, we know it used to
5767	 be a conditional or computed branch.  */
5768      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5769	{
5770	  /* Now emit a BARRIER after the unconditional jump.  */
5771	  if (NEXT_INSN (insn) == 0
5772	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5773	    emit_barrier_after (insn);
5774
5775	  /* We reemit the jump in as many cases as possible just in
5776	     case the form of an unconditional jump is significantly
5777	     different from that of a computed jump or conditional jump.
5778
5779	     If this insn has multiple sets, then reemitting the
5780	     jump is nontrivial.  So instead we just force rerecognition
5781	     and hope for the best.  */
5782	  if (n_sets == 1)
5783	    {
5784	      rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5785
5786	      JUMP_LABEL (new) = XEXP (src, 0);
5787	      LABEL_NUSES (XEXP (src, 0))++;
5788	      insn = new;
5789
5790	      /* Now emit a BARRIER after the unconditional jump.  */
5791	      if (NEXT_INSN (insn) == 0
5792		  || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5793		emit_barrier_after (insn);
5794	    }
5795	  else
5796	    INSN_CODE (insn) = -1;
5797
5798	  never_reached_warning (insn);
5799
5800	  /* Do not bother deleting any unreachable code,
5801	     let jump/flow do that.  */
5802
5803	  cse_jumps_altered = 1;
5804	  sets[i].rtl = 0;
5805	}
5806
5807      /* If destination is volatile, invalidate it and then do no further
5808	 processing for this assignment.  */
5809
5810      else if (do_not_record)
5811	{
5812	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5813	    invalidate (dest, VOIDmode);
5814	  else if (GET_CODE (dest) == MEM)
5815	    {
5816	      /* Outgoing arguments for a libcall don't
5817		 affect any recorded expressions.  */
5818	      if (! libcall_insn || insn == libcall_insn)
5819		invalidate (dest, VOIDmode);
5820	    }
5821	  else if (GET_CODE (dest) == STRICT_LOW_PART
5822		   || GET_CODE (dest) == ZERO_EXTRACT)
5823	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5824	  sets[i].rtl = 0;
5825	}
5826
5827      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5828	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5829
5830#ifdef HAVE_cc0
5831      /* If setting CC0, record what it was set to, or a constant, if it
5832	 is equivalent to a constant.  If it is being set to a floating-point
5833	 value, make a COMPARE with the appropriate constant of 0.  If we
5834	 don't do this, later code can interpret this as a test against
5835	 const0_rtx, which can cause problems if we try to put it into an
5836	 insn as a floating-point operand.  */
5837      if (dest == cc0_rtx)
5838	{
5839	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5840	  this_insn_cc0_mode = mode;
5841	  if (FLOAT_MODE_P (mode))
5842	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5843					     CONST0_RTX (mode));
5844	}
5845#endif
5846    }
5847
5848  /* Now enter all non-volatile source expressions in the hash table
5849     if they are not already present.
5850     Record their equivalence classes in src_elt.
5851     This way we can insert the corresponding destinations into
5852     the same classes even if the actual sources are no longer in them
5853     (having been invalidated).  */
5854
5855  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5856      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5857    {
5858      struct table_elt *elt;
5859      struct table_elt *classp = sets[0].src_elt;
5860      rtx dest = SET_DEST (sets[0].rtl);
5861      enum machine_mode eqvmode = GET_MODE (dest);
5862
5863      if (GET_CODE (dest) == STRICT_LOW_PART)
5864	{
5865	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5866	  classp = 0;
5867	}
5868      if (insert_regs (src_eqv, classp, 0))
5869	{
5870	  rehash_using_reg (src_eqv);
5871	  src_eqv_hash = HASH (src_eqv, eqvmode);
5872	}
5873      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5874      elt->in_memory = src_eqv_in_memory;
5875      src_eqv_elt = elt;
5876
5877      /* Check to see if src_eqv_elt is the same as a set source which
5878	 does not yet have an elt, and if so set the elt of the set source
5879	 to src_eqv_elt.  */
5880      for (i = 0; i < n_sets; i++)
5881	if (sets[i].rtl && sets[i].src_elt == 0
5882	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5883	  sets[i].src_elt = src_eqv_elt;
5884    }
5885
5886  for (i = 0; i < n_sets; i++)
5887    if (sets[i].rtl && ! sets[i].src_volatile
5888	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5889      {
5890	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5891	  {
5892	    /* REG_EQUAL in setting a STRICT_LOW_PART
5893	       gives an equivalent for the entire destination register,
5894	       not just for the subreg being stored in now.
5895	       This is a more interesting equivalence, so we arrange later
5896	       to treat the entire reg as the destination.  */
5897	    sets[i].src_elt = src_eqv_elt;
5898	    sets[i].src_hash = src_eqv_hash;
5899	  }
5900	else
5901	  {
5902	    /* Insert source and constant equivalent into hash table, if not
5903	       already present.  */
5904	    struct table_elt *classp = src_eqv_elt;
5905	    rtx src = sets[i].src;
5906	    rtx dest = SET_DEST (sets[i].rtl);
5907	    enum machine_mode mode
5908	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5909
5910	    if (sets[i].src_elt == 0)
5911	      {
5912		/* Don't put a hard register source into the table if this is
5913		   the last insn of a libcall.  In this case, we only need
5914		   to put src_eqv_elt in src_elt.  */
5915		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5916		  {
5917		    struct table_elt *elt;
5918
5919		    /* Note that these insert_regs calls cannot remove
5920		       any of the src_elt's, because they would have failed to
5921		       match if not still valid.  */
5922		    if (insert_regs (src, classp, 0))
5923		      {
5924			rehash_using_reg (src);
5925			sets[i].src_hash = HASH (src, mode);
5926		      }
5927		    elt = insert (src, classp, sets[i].src_hash, mode);
5928		    elt->in_memory = sets[i].src_in_memory;
5929		    sets[i].src_elt = classp = elt;
5930		  }
5931		else
5932		  sets[i].src_elt = classp;
5933	      }
5934	    if (sets[i].src_const && sets[i].src_const_elt == 0
5935		&& src != sets[i].src_const
5936		&& ! rtx_equal_p (sets[i].src_const, src))
5937	      sets[i].src_elt = insert (sets[i].src_const, classp,
5938					sets[i].src_const_hash, mode);
5939	  }
5940      }
5941    else if (sets[i].src_elt == 0)
5942      /* If we did not insert the source into the hash table (e.g., it was
5943	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5944	 so that the destination goes into that class.  */
5945      sets[i].src_elt = src_eqv_elt;
5946
5947  invalidate_from_clobbers (x);
5948
5949  /* Some registers are invalidated by subroutine calls.  Memory is
5950     invalidated by non-constant calls.  */
5951
5952  if (GET_CODE (insn) == CALL_INSN)
5953    {
5954      if (! CONST_OR_PURE_CALL_P (insn))
5955	invalidate_memory ();
5956      invalidate_for_call ();
5957    }
5958
5959  /* Now invalidate everything set by this instruction.
5960     If a SUBREG or other funny destination is being set,
5961     sets[i].rtl is still nonzero, so here we invalidate the reg
5962     a part of which is being set.  */
5963
5964  for (i = 0; i < n_sets; i++)
5965    if (sets[i].rtl)
5966      {
5967	/* We can't use the inner dest, because the mode associated with
5968	   a ZERO_EXTRACT is significant.  */
5969	rtx dest = SET_DEST (sets[i].rtl);
5970
5971	/* Needed for registers to remove the register from its
5972	   previous quantity's chain.
5973	   Needed for memory if this is a nonvarying address, unless
5974	   we have just done an invalidate_memory that covers even those.  */
5975	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5976	  invalidate (dest, VOIDmode);
5977	else if (GET_CODE (dest) == MEM)
5978	  {
5979	    /* Outgoing arguments for a libcall don't
5980	       affect any recorded expressions.  */
5981	    if (! libcall_insn || insn == libcall_insn)
5982	      invalidate (dest, VOIDmode);
5983	  }
5984	else if (GET_CODE (dest) == STRICT_LOW_PART
5985		 || GET_CODE (dest) == ZERO_EXTRACT)
5986	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5987      }
5988
5989  /* A volatile ASM invalidates everything.  */
5990  if (GET_CODE (insn) == INSN
5991      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5992      && MEM_VOLATILE_P (PATTERN (insn)))
5993    flush_hash_table ();
5994
5995  /* Make sure registers mentioned in destinations
5996     are safe for use in an expression to be inserted.
5997     This removes from the hash table
5998     any invalid entry that refers to one of these registers.
5999
6000     We don't care about the return value from mention_regs because
6001     we are going to hash the SET_DEST values unconditionally.  */
6002
6003  for (i = 0; i < n_sets; i++)
6004    {
6005      if (sets[i].rtl)
6006	{
6007	  rtx x = SET_DEST (sets[i].rtl);
6008
6009	  if (GET_CODE (x) != REG)
6010	    mention_regs (x);
6011	  else
6012	    {
6013	      /* We used to rely on all references to a register becoming
6014		 inaccessible when a register changes to a new quantity,
6015		 since that changes the hash code.  However, that is not
6016		 safe, since after HASH_SIZE new quantities we get a
6017		 hash 'collision' of a register with its own invalid
6018		 entries.  And since SUBREGs have been changed not to
6019		 change their hash code with the hash code of the register,
6020		 it wouldn't work any longer at all.  So we have to check
6021		 for any invalid references lying around now.
6022		 This code is similar to the REG case in mention_regs,
6023		 but it knows that reg_tick has been incremented, and
6024		 it leaves reg_in_table as -1.  */
6025	      unsigned int regno = REGNO (x);
6026	      unsigned int endregno
6027		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6028			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6029	      unsigned int i;
6030
6031	      for (i = regno; i < endregno; i++)
6032		{
6033		  if (REG_IN_TABLE (i) >= 0)
6034		    {
6035		      remove_invalid_refs (i);
6036		      REG_IN_TABLE (i) = -1;
6037		    }
6038		}
6039	    }
6040	}
6041    }
6042
6043  /* We may have just removed some of the src_elt's from the hash table.
6044     So replace each one with the current head of the same class.  */
6045
6046  for (i = 0; i < n_sets; i++)
6047    if (sets[i].rtl)
6048      {
6049	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6050	  /* If elt was removed, find current head of same class,
6051	     or 0 if nothing remains of that class.  */
6052	  {
6053	    struct table_elt *elt = sets[i].src_elt;
6054
6055	    while (elt && elt->prev_same_value)
6056	      elt = elt->prev_same_value;
6057
6058	    while (elt && elt->first_same_value == 0)
6059	      elt = elt->next_same_value;
6060	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6061	  }
6062      }
6063
6064  /* Now insert the destinations into their equivalence classes.  */
6065
6066  for (i = 0; i < n_sets; i++)
6067    if (sets[i].rtl)
6068      {
6069	rtx dest = SET_DEST (sets[i].rtl);
6070	rtx inner_dest = sets[i].inner_dest;
6071	struct table_elt *elt;
6072
6073	/* Don't record value if we are not supposed to risk allocating
6074	   floating-point values in registers that might be wider than
6075	   memory.  */
6076	if ((flag_float_store
6077	     && GET_CODE (dest) == MEM
6078	     && FLOAT_MODE_P (GET_MODE (dest)))
6079	    /* Don't record BLKmode values, because we don't know the
6080	       size of it, and can't be sure that other BLKmode values
6081	       have the same or smaller size.  */
6082	    || GET_MODE (dest) == BLKmode
6083	    /* Don't record values of destinations set inside a libcall block
6084	       since we might delete the libcall.  Things should have been set
6085	       up so we won't want to reuse such a value, but we play it safe
6086	       here.  */
6087	    || libcall_insn
6088	    /* If we didn't put a REG_EQUAL value or a source into the hash
6089	       table, there is no point in recording DEST.  */
6090	    || sets[i].src_elt == 0
6091	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6092	       or SIGN_EXTEND, don't record DEST since it can cause
6093	       some tracking to be wrong.
6094
6095	       ??? Think about this more later.  */
6096	    || (GET_CODE (dest) == SUBREG
6097		&& (GET_MODE_SIZE (GET_MODE (dest))
6098		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6099		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6100		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6101	  continue;
6102
6103	/* STRICT_LOW_PART isn't part of the value BEING set,
6104	   and neither is the SUBREG inside it.
6105	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6106	if (GET_CODE (dest) == STRICT_LOW_PART)
6107	  dest = SUBREG_REG (XEXP (dest, 0));
6108
6109	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6110	  /* Registers must also be inserted into chains for quantities.  */
6111	  if (insert_regs (dest, sets[i].src_elt, 1))
6112	    {
6113	      /* If `insert_regs' changes something, the hash code must be
6114		 recalculated.  */
6115	      rehash_using_reg (dest);
6116	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6117	    }
6118
6119	if (GET_CODE (inner_dest) == MEM
6120	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6121	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6122	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
6123	     Consider the case in which the address of the MEM is
6124	     passed to a function, which alters the MEM.  Then, if we
6125	     later use Y instead of the MEM we'll miss the update.  */
6126	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6127	else
6128	  elt = insert (dest, sets[i].src_elt,
6129			sets[i].dest_hash, GET_MODE (dest));
6130
6131	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6132			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6133			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6134							  0))));
6135
6136	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6137	   narrower than M2, and both M1 and M2 are the same number of words,
6138	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6139	   make that equivalence as well.
6140
6141	   However, BAR may have equivalences for which gen_lowpart_if_possible
6142	   will produce a simpler value than gen_lowpart_if_possible applied to
6143	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6144	   BAR's equivalences.  If we don't get a simplified form, make
6145	   the SUBREG.  It will not be used in an equivalence, but will
6146	   cause two similar assignments to be detected.
6147
6148	   Note the loop below will find SUBREG_REG (DEST) since we have
6149	   already entered SRC and DEST of the SET in the table.  */
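	/* E.g., with 4-byte words, (set (subreg:SI (reg:HI 100) 0) Y) also
	   tells us (set (reg:HI 100) (subreg:HI Y 0)), since both modes
	   occupy a single word.  */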
6150
6151	if (GET_CODE (dest) == SUBREG
6152	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6153		 / UNITS_PER_WORD)
6154		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6155	    && (GET_MODE_SIZE (GET_MODE (dest))
6156		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6157	    && sets[i].src_elt != 0)
6158	  {
6159	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6160	    struct table_elt *elt, *classp = 0;
6161
6162	    for (elt = sets[i].src_elt->first_same_value; elt;
6163		 elt = elt->next_same_value)
6164	      {
6165		rtx new_src = 0;
6166		unsigned src_hash;
6167		struct table_elt *src_elt;
6168
6169		/* Ignore invalid entries.  */
6170		if (GET_CODE (elt->exp) != REG
6171		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6172		  continue;
6173
6174		new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6175		if (new_src == 0)
6176		  new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6177
6178		src_hash = HASH (new_src, new_mode);
6179		src_elt = lookup (new_src, src_hash, new_mode);
6180
6181		/* Put the new source in the hash table if it isn't
6182		   there already.  */
6183		if (src_elt == 0)
6184		  {
6185		    if (insert_regs (new_src, classp, 0))
6186		      {
6187			rehash_using_reg (new_src);
6188			src_hash = HASH (new_src, new_mode);
6189		      }
6190		    src_elt = insert (new_src, classp, src_hash, new_mode);
6191		    src_elt->in_memory = elt->in_memory;
6192		  }
6193		else if (classp && classp != src_elt->first_same_value)
6194		  /* Show that two things that we've seen before are
6195		     actually the same.  */
6196		  merge_equiv_classes (src_elt, classp);
6197
6198		classp = src_elt->first_same_value;
6199		/* Ignore invalid entries.  */
6200		while (classp
6201		       && GET_CODE (classp->exp) != REG
6202		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6203		  classp = classp->next_same_value;
6204	      }
6205	  }
6206      }
6207
6208  /* Special handling for (set REG0 REG1) where REG0 is the
6209     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6210     be used in the sequel, so (if easily done) change this insn to
6211     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6212     that computed their value.  Then REG1 will become a dead store
6213     and won't cloud the situation for later optimizations.
6214
6215     Do not make this change if REG1 is a hard register, because it will
6216     then be used in the sequel and we may be changing a two-operand insn
6217     into a three-operand insn.
6218
6219     Also do not do this if we are operating on a copy of INSN.
6220
6221     Also don't do this if INSN ends a libcall; this would cause an unrelated
6222     register to be set in the middle of a libcall, and we then get bad code
6223     if the libcall is deleted.  */
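
  /* As an illustration (pseudo-register numbers invented), given

	(set (reg 101) (plus (reg 102) (reg 103)))
	(set (reg 100) (reg 101))

     where (reg 100) heads the equivalence class, the transformation
     described above produces

	(set (reg 100) (plus (reg 102) (reg 103)))
	(set (reg 101) (reg 100))

     so the copy into (reg 101) becomes a dead store whenever (reg 101)
     is unused in the sequel.  */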
6224
6225  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6226      && NEXT_INSN (PREV_INSN (insn)) == insn
6227      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6228      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6229      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6230    {
6231      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6232      struct qty_table_elem *src_ent = &qty_table[src_q];
6233
6234      if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6235	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6236	{
6237	  rtx prev = prev_nonnote_insn (insn);
6238
6239	  /* Do not swap the registers around if the previous instruction
6240	     attaches a REG_EQUIV note to REG1.
6241
6242	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6243	     from the pseudo that originally shadowed an incoming argument
6244	     to another register.  Some uses of REG_EQUIV might rely on it
6245	     being attached to REG1 rather than REG2.
6246
6247	     This section previously turned the REG_EQUIV into a REG_EQUAL
6248	     note.  We cannot do that because REG_EQUIV may provide an
6249	     uninitialised stack slot when REG_PARM_STACK_SPACE is used.  */
6250
6251	  if (prev != 0 && GET_CODE (prev) == INSN
6252	      && GET_CODE (PATTERN (prev)) == SET
6253	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6254	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6255	    {
6256	      rtx dest = SET_DEST (sets[0].rtl);
6257	      rtx src = SET_SRC (sets[0].rtl);
6258	      rtx note;
6259
6260	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6261	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6262	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6263	      apply_change_group ();
6264
6265	      /* If there was a REG_WAS_0 note on PREV, remove it.  Move
6266		 any REG_WAS_0 note on INSN to PREV.  */
6267	      note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6268	      if (note)
6269		remove_note (prev, note);
6270
6271	      note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6272	      if (note)
6273		{
6274		  remove_note (insn, note);
6275		  XEXP (note, 1) = REG_NOTES (prev);
6276		  REG_NOTES (prev) = note;
6277		}
6278
6279	      /* If INSN has a REG_EQUAL note, and this note mentions
6280		 REG0, then we must delete it, because the value in
6281		 REG0 has changed.  If the note's value is REG1, we must
6282		 also delete it because that is now this insn's dest.  */
6283	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6284	      if (note != 0
6285		  && (reg_mentioned_p (dest, XEXP (note, 0))
6286		      || rtx_equal_p (src, XEXP (note, 0))))
6287		remove_note (insn, note);
6288	    }
6289	}
6290    }
6291
6292  /* If this is a conditional jump insn, record any known equivalences due to
6293     the condition being tested.  */
6294
6295  last_jump_equiv_class = 0;
6296  if (GET_CODE (insn) == JUMP_INSN
6297      && n_sets == 1 && GET_CODE (x) == SET
6298      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6299    record_jump_equiv (insn, 0);
6300
6301#ifdef HAVE_cc0
6302  /* If the previous insn set CC0 and this insn no longer references CC0,
6303     delete the previous insn.  Here we use the fact that nothing expects CC0
6304     to be valid over an insn, which is true until the final pass.  */
6305  if (prev_insn && GET_CODE (prev_insn) == INSN
6306      && (tem = single_set (prev_insn)) != 0
6307      && SET_DEST (tem) == cc0_rtx
6308      && ! reg_mentioned_p (cc0_rtx, x))
6309    delete_insn (prev_insn);
6310
6311  prev_insn_cc0 = this_insn_cc0;
6312  prev_insn_cc0_mode = this_insn_cc0_mode;
6313#endif
6314
6315  prev_insn = insn;
6316}
6317
6318/* Remove from the hash table all expressions that reference memory.  */
6319
6320static void
6321invalidate_memory ()
6322{
6323  int i;
6324  struct table_elt *p, *next;
6325
6326  for (i = 0; i < HASH_SIZE; i++)
6327    for (p = table[i]; p; p = next)
6328      {
6329	next = p->next_same_hash;
6330	if (p->in_memory)
6331	  remove_from_table (p, i);
6332      }
6333}
6334
6335/* If ADDR is an address that implicitly affects the stack pointer, return
6336   1 and update the register tables to show the effect.  Else, return 0.  */
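
/* A stack push is the canonical example: its address is an
   autoincrement such as (pre_dec (reg sp)), whose operand is the stack
   pointer, so the function below bumps REG_TICK for the stack pointer
   and returns 1.  */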
6337
6338static int
6339addr_affects_sp_p (addr)
6340     rtx addr;
6341{
6342  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6343      && GET_CODE (XEXP (addr, 0)) == REG
6344      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6345    {
6346      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6347	REG_TICK (STACK_POINTER_REGNUM)++;
6348
6349      /* This should be *very* rare.  */
6350      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6351	invalidate (stack_pointer_rtx, VOIDmode);
6352
6353      return 1;
6354    }
6355
6356  return 0;
6357}
6358
6359/* Perform invalidation on the basis of everything about an insn
6360   except for invalidating the actual places that are SET in it.
6361   This includes the places CLOBBERed, and anything that might
6362   alias with something that is SET or CLOBBERed.
6363
6364   X is the pattern of the insn.  */
6365
6366static void
6367invalidate_from_clobbers (x)
6368     rtx x;
6369{
6370  if (GET_CODE (x) == CLOBBER)
6371    {
6372      rtx ref = XEXP (x, 0);
6373      if (ref)
6374	{
6375	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6376	      || GET_CODE (ref) == MEM)
6377	    invalidate (ref, VOIDmode);
6378	  else if (GET_CODE (ref) == STRICT_LOW_PART
6379		   || GET_CODE (ref) == ZERO_EXTRACT)
6380	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6381	}
6382    }
6383  else if (GET_CODE (x) == PARALLEL)
6384    {
6385      int i;
6386      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6387	{
6388	  rtx y = XVECEXP (x, 0, i);
6389	  if (GET_CODE (y) == CLOBBER)
6390	    {
6391	      rtx ref = XEXP (y, 0);
6392	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6393		  || GET_CODE (ref) == MEM)
6394		invalidate (ref, VOIDmode);
6395	      else if (GET_CODE (ref) == STRICT_LOW_PART
6396		       || GET_CODE (ref) == ZERO_EXTRACT)
6397		invalidate (XEXP (ref, 0), GET_MODE (ref));
6398	    }
6399	}
6400    }
6401}
6402
6403/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6404   and replace any registers in them with either an equivalent constant
6405   or the canonical form of the register.  If we are inside an address,
6406   only do this if the address remains valid.
6407
6408   OBJECT is 0 except when within a MEM in which case it is the MEM.
6409
6410   Return the replacement for X.  */
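
/* For instance (register numbers invented), if (reg:SI 117) is known to
   be equivalent to (const_int 4), a REG_EQUAL note containing
   (plus:SI (reg:SI 117) (reg:SI 118)) is rewritten as
   (plus:SI (const_int 4) (reg:SI 118)), giving later passes a more
   useful equivalence.  */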
6411
6412static rtx
6413cse_process_notes (x, object)
6414     rtx x;
6415     rtx object;
6416{
6417  enum rtx_code code = GET_CODE (x);
6418  const char *fmt = GET_RTX_FORMAT (code);
6419  int i;
6420
6421  switch (code)
6422    {
6423    case CONST_INT:
6424    case CONST:
6425    case SYMBOL_REF:
6426    case LABEL_REF:
6427    case CONST_DOUBLE:
6428    case PC:
6429    case CC0:
6430    case LO_SUM:
6431      return x;
6432
6433    case MEM:
6434      validate_change (x, &XEXP (x, 0),
6435		       cse_process_notes (XEXP (x, 0), x), 0);
6436      return x;
6437
6438    case EXPR_LIST:
6439    case INSN_LIST:
6440      if (REG_NOTE_KIND (x) == REG_EQUAL)
6441	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6442      if (XEXP (x, 1))
6443	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6444      return x;
6445
6446    case SIGN_EXTEND:
6447    case ZERO_EXTEND:
6448    case SUBREG:
6449      {
6450	rtx new = cse_process_notes (XEXP (x, 0), object);
6451	/* We don't substitute VOIDmode constants into these rtx,
6452	   since they would impede folding.  */
6453	if (GET_MODE (new) != VOIDmode)
6454	  validate_change (object, &XEXP (x, 0), new, 0);
6455	return x;
6456      }
6457
6458    case REG:
6459      i = REG_QTY (REGNO (x));
6460
6461      /* Return a constant or a constant register.  */
6462      if (REGNO_QTY_VALID_P (REGNO (x)))
6463	{
6464	  struct qty_table_elem *ent = &qty_table[i];
6465
6466	  if (ent->const_rtx != NULL_RTX
6467	      && (CONSTANT_P (ent->const_rtx)
6468		  || GET_CODE (ent->const_rtx) == REG))
6469	    {
6470	      rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6471	      if (new)
6472		return new;
6473	    }
6474	}
6475
6476      /* Otherwise, canonicalize this register.  */
6477      return canon_reg (x, NULL_RTX);
6478
6479    default:
6480      break;
6481    }
6482
6483  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6484    if (fmt[i] == 'e')
6485      validate_change (object, &XEXP (x, i),
6486		       cse_process_notes (XEXP (x, i), object), 0);
6487
6488  return x;
6489}
6490
6491/* Find common subexpressions between the end test of a loop and the beginning
6492   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
6493
6494   Often we have a loop where an expression in the exit test is used
6495   in the body of the loop.  For example "while (*p) *q++ = *p++;".
6496   Because of the way we duplicate the loop exit test in front of the loop,
6497   however, we don't detect that common subexpression.  This will be caught
6498	   when global cse is implemented, but this is quite a common case.
6499
6500   This function handles the most common cases of these common expressions.
6501   It is called after we have processed the basic block ending with the
6502   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6503   jumps to a label used only once.  */
6504
6505static void
6506cse_around_loop (loop_start)
6507     rtx loop_start;
6508{
6509  rtx insn;
6510  int i;
6511  struct table_elt *p;
6512
6513  /* If the jump at the end of the loop doesn't go to the start, we don't
6514     do anything.  */
6515  for (insn = PREV_INSN (loop_start);
6516       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6517       insn = PREV_INSN (insn))
6518    ;
6519
6520  if (insn == 0
6521      || GET_CODE (insn) != NOTE
6522      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6523    return;
6524
6525  /* If the last insn of the loop (the end test) was an NE comparison,
6526     we will interpret it as an EQ comparison, since we fell through
6527     the loop.  Any equivalences resulting from that comparison are
6528     therefore not valid and must be invalidated.  */
6529  if (last_jump_equiv_class)
6530    for (p = last_jump_equiv_class->first_same_value; p;
6531	 p = p->next_same_value)
6532      {
6533	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6534	    || (GET_CODE (p->exp) == SUBREG
6535		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
6536	  invalidate (p->exp, VOIDmode);
6537	else if (GET_CODE (p->exp) == STRICT_LOW_PART
6538		 || GET_CODE (p->exp) == ZERO_EXTRACT)
6539	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6540      }
6541
6542  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6543     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6544
6545     The only thing we do with SET_DEST is invalidate entries, so we
6546     can safely process each SET in order.  It is slightly less efficient
6547     to do so, but we only want to handle the most common cases.
6548
6549     The gen_move_insn call in cse_set_around_loop may create new pseudos.
6550     These pseudos won't have valid entries in any of the tables indexed
6551     by register number, such as reg_qty.  We avoid out-of-range array
6552     accesses by not processing any instructions created after cse started.  */
6553
6554  for (insn = NEXT_INSN (loop_start);
6555       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6556       && INSN_UID (insn) < max_insn_uid
6557       && ! (GET_CODE (insn) == NOTE
6558	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6559       insn = NEXT_INSN (insn))
6560    {
6561      if (INSN_P (insn)
6562	  && (GET_CODE (PATTERN (insn)) == SET
6563	      || GET_CODE (PATTERN (insn)) == CLOBBER))
6564	cse_set_around_loop (PATTERN (insn), insn, loop_start);
6565      else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6566	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6567	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6568	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6569	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6570				 loop_start);
6571    }
6572}
6573
6574/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6575   since they are done elsewhere.  This function is called via note_stores.  */
6576
6577static void
6578invalidate_skipped_set (dest, set, data)
6579     rtx dest;
6580     rtx set;
6581     void *data ATTRIBUTE_UNUSED;
6582{
6583  enum rtx_code code = GET_CODE (dest);
6584
6585  if (code == MEM
6586      && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6587      /* There are times when an address can appear varying and be a PLUS
6588	 during this scan when it would be a fixed address were we to know
6589	 the proper equivalences.  So invalidate all memory if there is
6590	 a BLKmode or nonscalar memory reference or a reference to a
6591	 variable address.  */
6592      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6593	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6594    {
6595      invalidate_memory ();
6596      return;
6597    }
6598
6599  if (GET_CODE (set) == CLOBBER
6600#ifdef HAVE_cc0
6601      || dest == cc0_rtx
6602#endif
6603      || dest == pc_rtx)
6604    return;
6605
6606  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6607    invalidate (XEXP (dest, 0), GET_MODE (dest));
6608  else if (code == REG || code == SUBREG || code == MEM)
6609    invalidate (dest, VOIDmode);
6610}
6611
6612/* Invalidate all insns from START up to the end of the function or the
6613	   next label.  This is called when we wish to CSE around a block that is
6614   conditionally executed.  */
6615
6616static void
6617invalidate_skipped_block (start)
6618     rtx start;
6619{
6620  rtx insn;
6621
6622  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6623       insn = NEXT_INSN (insn))
6624    {
6625      if (! INSN_P (insn))
6626	continue;
6627
6628      if (GET_CODE (insn) == CALL_INSN)
6629	{
6630	  if (! CONST_OR_PURE_CALL_P (insn))
6631	    invalidate_memory ();
6632	  invalidate_for_call ();
6633	}
6634
6635      invalidate_from_clobbers (PATTERN (insn));
6636      note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6637    }
6638}
6639
6640/* If modifying X will modify the value in *DATA (which is really an
6641   `rtx *'), indicate that fact by setting the pointed to value to
6642   NULL_RTX.  */
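
/* For example (an invented case), if *DATA is (plus (reg 99)
   (const_int 4)) and X is (reg 99), or if X and *DATA are both MEMs,
   the pointed-to value is replaced with NULL_RTX.  */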
6643
6644static void
6645cse_check_loop_start (x, set, data)
6646     rtx x;
6647     rtx set ATTRIBUTE_UNUSED;
6648     void *data;
6649{
6650  rtx *cse_check_loop_start_value = (rtx *) data;
6651
6652  if (*cse_check_loop_start_value == NULL_RTX
6653      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6654    return;
6655
6656  if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6657      || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6658    *cse_check_loop_start_value = NULL_RTX;
6659}
6660
6661/* X is a SET or CLOBBER contained in INSN that was found near the start of
6662   a loop that starts with the label at LOOP_START.
6663
6664   If X is a SET, we see if its SET_SRC is currently in our hash table.
6665   If so, we see if it has a value equal to some register used only in the
6666   loop exit code (as marked by jump.c).
6667
6668   If those two conditions are true, we search backwards from the start of
6669   the loop to see if that same value was loaded into a register that still
6670   retains its value at the start of the loop.
6671
6672   If so, we insert an insn after the load to copy the destination of that
6673   load into the equivalent register and (try to) replace our SET_SRC with that
6674   register.
6675
6676   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
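
/* A sketch with invented pseudo numbers: suppose the loop body contains
   (set (reg 120) (mem X)), the hash table records (mem X) as equal to
   (reg 121), a register used only in the loop exit test, and an insn P
   before the loop computed (set (reg 122) (mem X)) with nothing
   modifying that value in between.  Then we emit (set (reg 121)
   (reg 122)) after P and replace the body insn's source with
   (reg 121).  */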
6677
6678static void
6679cse_set_around_loop (x, insn, loop_start)
6680     rtx x;
6681     rtx insn;
6682     rtx loop_start;
6683{
6684  struct table_elt *src_elt;
6685
6686  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6687     are setting PC or CC0 or whose SET_SRC is already a register.  */
6688  if (GET_CODE (x) == SET
6689      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6690      && GET_CODE (SET_SRC (x)) != REG)
6691    {
6692      src_elt = lookup (SET_SRC (x),
6693			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6694			GET_MODE (SET_DEST (x)));
6695
6696      if (src_elt)
6697	for (src_elt = src_elt->first_same_value; src_elt;
6698	     src_elt = src_elt->next_same_value)
6699	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6700	      && COST (src_elt->exp) < COST (SET_SRC (x)))
6701	    {
6702	      rtx p, set;
6703
6704	      /* Look for an insn in front of LOOP_START that sets
6705		 something in the desired mode to SET_SRC (x) before we hit
6706		 a label or CALL_INSN.  */
6707
6708	      for (p = prev_nonnote_insn (loop_start);
6709		   p && GET_CODE (p) != CALL_INSN
6710		   && GET_CODE (p) != CODE_LABEL;
6711		   p = prev_nonnote_insn (p))
6712		if ((set = single_set (p)) != 0
6713		    && GET_CODE (SET_DEST (set)) == REG
6714		    && GET_MODE (SET_DEST (set)) == src_elt->mode
6715		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6716		  {
6717		    /* We now have to ensure that nothing between P
6718		       and LOOP_START modified anything referenced in
6719		       SET_SRC (x).  We know that nothing within the loop
6720		       can modify it, or we would have invalidated it in
6721		       the hash table.  */
6722		    rtx q;
6723		    rtx cse_check_loop_start_value = SET_SRC (x);
6724		    for (q = p; q != loop_start; q = NEXT_INSN (q))
6725		      if (INSN_P (q))
6726			note_stores (PATTERN (q),
6727				     cse_check_loop_start,
6728				     &cse_check_loop_start_value);
6729
6730		    /* If nothing was changed and we can replace our
6731		       SET_SRC, add an insn after P to copy its destination
6732		       to what we will be replacing SET_SRC with.  */
6733		    if (cse_check_loop_start_value
6734			&& validate_change (insn, &SET_SRC (x),
6735					    src_elt->exp, 0))
6736		      {
6737			/* If this creates new pseudos, this is unsafe,
6738			   because the regno of a new pseudo is unsuitable
6739			   to index into reg_qty when cse_insn processes
6740			   the new insn.  Therefore, if a new pseudo was
6741			   created, discard this optimization.  */
6742			int nregs = max_reg_num ();
6743			rtx move
6744			  = gen_move_insn (src_elt->exp, SET_DEST (set));
6745			if (nregs != max_reg_num ())
6746			  {
6747			    if (! validate_change (insn, &SET_SRC (x),
6748						   SET_SRC (set), 0))
6749			      abort ();
6750			  }
6751			else
6752			  emit_insn_after (move, p);
6753		      }
6754		    break;
6755		  }
6756	    }
6757    }
6758
6759  /* Deal with the destination of X affecting the stack pointer.  */
6760  addr_affects_sp_p (SET_DEST (x));
6761
6762  /* See comment on similar code in cse_insn for explanation of these
6763     tests.  */
6764  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6765      || GET_CODE (SET_DEST (x)) == MEM)
6766    invalidate (SET_DEST (x), VOIDmode);
6767  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6768	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6769    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6770}
6771
6772/* Find the end of INSN's basic block and return its range,
6773   the total number of SETs in all the insns of the block, the last insn of the
6774   block, and the branch path.
6775
6776   The branch path indicates which branches should be followed.  If a non-zero
6777   path size is specified, the block should be rescanned and a different set
6778   of branches will be taken.  The branch path is only used if
6779   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6780
6781   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6782   used to describe the block.  It is filled in with the information about
6783   the current block.  The incoming structure's branch path, if any, is used
6784   to construct the output branch path.  */
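
/* For example, if the previous scan followed two conditional branches
   (both path entries TAKEN), the code below flips the last entry to
   NOT_TAKEN so the rescan explores a different route; once every entry
   is NOT_TAKEN the path has been exhausted and its length drops to
   zero.  */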
6785
6786void
6787cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6788     rtx insn;
6789     struct cse_basic_block_data *data;
6790     int follow_jumps;
6791     int after_loop;
6792     int skip_blocks;
6793{
6794  rtx p = insn, q;
6795  int nsets = 0;
6796  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6797  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6798  int path_size = data->path_size;
6799  int path_entry = 0;
6800  int i;
6801
6802  /* Update the previous branch path, if any.  If the last branch was
6803     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
6804     shorten the path by one and look at the previous branch.  We know that
6805     at least one branch must have been taken if PATH_SIZE is non-zero.  */
6806  while (path_size > 0)
6807    {
6808      if (data->path[path_size - 1].status != NOT_TAKEN)
6809	{
6810	  data->path[path_size - 1].status = NOT_TAKEN;
6811	  break;
6812	}
6813      else
6814	path_size--;
6815    }
6816
6817  /* If the first instruction is marked with QImode, that means we've
6818     already processed this block.  Our caller will look at DATA->LAST
6819     to figure out where to go next.  We want to return the next block
6820     in the instruction stream, not some branched-to block somewhere
6821	     else.  We accomplish this by pretending our caller forbade us to
6822	     follow jumps or skip blocks.  */
6823  if (GET_MODE (insn) == QImode)
6824    follow_jumps = skip_blocks = 0;
6825
6826  /* Scan to end of this basic block.  */
6827  while (p && GET_CODE (p) != CODE_LABEL)
6828    {
6829      /* Don't cse out the end of a loop.  This makes a difference
6830	 only for the unusual loops that always execute at least once;
6831	 all other loops have labels there so we will stop in any case.
6832	 Cse'ing out the end of the loop is dangerous because it
6833	 might cause an invariant expression inside the loop
6834	 to be reused after the end of the loop.  This would make it
6835	 hard to move the expression out of the loop in loop.c,
6836	 especially if it is one of several equivalent expressions
6837	 and loop.c would like to eliminate it.
6838
6839	 If we are running after loop.c has finished, we can ignore
6840	 the NOTE_INSN_LOOP_END.  */
6841
6842      if (! after_loop && GET_CODE (p) == NOTE
6843	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6844	break;
6845
6846      /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
6847	 the regs restored by the longjmp come from
6848	 a later time than the setjmp.  */
6849      if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6850	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6851	break;
6852
6853      /* A PARALLEL can have lots of SETs in it,
6854	 especially if it is really an ASM_OPERANDS.  */
6855      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6856	nsets += XVECLEN (PATTERN (p), 0);
6857      else if (GET_CODE (p) != NOTE)
6858	nsets += 1;
6859
6860      /* Ignore insns made by CSE; they cannot affect the boundaries of
6861	 the basic block.  */
6862
6863      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6864	high_cuid = INSN_CUID (p);
6865      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6866	low_cuid = INSN_CUID (p);
6867
6868      /* See if this insn is in our branch path.  If it is and we are to
6869	 take it, do so.  */
6870      if (path_entry < path_size && data->path[path_entry].branch == p)
6871	{
6872	  if (data->path[path_entry].status != NOT_TAKEN)
6873	    p = JUMP_LABEL (p);
6874
6875	  /* Point to next entry in path, if any.  */
6876	  path_entry++;
6877	}
6878
6879      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6880	 was specified, we haven't reached our maximum path length, there are
6881	 insns following the target of the jump, this is the only use of the
6882	 jump label, and the target label is preceded by a BARRIER.
6883
6884	 Alternatively, we can follow the jump if it branches around a
6885	 block of code and there are no other branches into the block.
6886	 In this case invalidate_skipped_block will be called to invalidate any
6887	 registers set in the block when following the jump.  */
6888
6889      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6890	       && GET_CODE (p) == JUMP_INSN
6891	       && GET_CODE (PATTERN (p)) == SET
6892	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6893	       && JUMP_LABEL (p) != 0
6894	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6895	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6896	{
6897	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6898	    if ((GET_CODE (q) != NOTE
6899		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6900		 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6901		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6902		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6903	      break;
6904
6905	  /* If we ran into a BARRIER, this code is an extension of the
6906	     basic block when the branch is taken.  */
6907	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6908	    {
6909	      /* Don't allow ourselves to keep walking around an
6910		 always-executed loop.  */
6911	      if (next_real_insn (q) == next)
6912		{
6913		  p = NEXT_INSN (p);
6914		  continue;
6915		}
6916
6917	      /* Similarly, don't put a branch in our path more than once.  */
6918	      for (i = 0; i < path_entry; i++)
6919		if (data->path[i].branch == p)
6920		  break;
6921
6922	      if (i != path_entry)
6923		break;
6924
6925	      data->path[path_entry].branch = p;
6926	      data->path[path_entry++].status = TAKEN;
6927
6928	      /* This branch now ends our path.  It was possible that we
6929		 didn't see this branch the last time around (when the
6930		 insn in front of the target was a JUMP_INSN that was
6931		 turned into a no-op).  */
6932	      path_size = path_entry;
6933
6934	      p = JUMP_LABEL (p);
6935	      /* Mark block so we won't scan it again later.  */
6936	      PUT_MODE (NEXT_INSN (p), QImode);
6937	    }
6938	  /* Detect a branch around a block of code.  */
6939	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6940	    {
6941	      rtx tmp;
6942
6943	      if (next_real_insn (q) == next)
6944		{
6945		  p = NEXT_INSN (p);
6946		  continue;
6947		}
6948
6949	      for (i = 0; i < path_entry; i++)
6950		if (data->path[i].branch == p)
6951		  break;
6952
6953	      if (i != path_entry)
6954		break;
6955
6956	      /* This is no_labels_between_p (p, q) with an added check for
6957		 reaching the end of a function (in case Q precedes P).  */
6958	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6959		if (GET_CODE (tmp) == CODE_LABEL)
6960		  break;
6961
6962	      if (tmp == q)
6963		{
6964		  data->path[path_entry].branch = p;
6965		  data->path[path_entry++].status = AROUND;
6966
6967		  path_size = path_entry;
6968
6969		  p = JUMP_LABEL (p);
6970		  /* Mark block so we won't scan it again later.  */
6971		  PUT_MODE (NEXT_INSN (p), QImode);
6972		}
6973	    }
6974	}
6975      p = NEXT_INSN (p);
6976    }
6977
6978  data->low_cuid = low_cuid;
6979  data->high_cuid = high_cuid;
6980  data->nsets = nsets;
6981  data->last = p;
6982
6983  /* If none of the jumps in the path were taken, set our path length to zero
6984     so a rescan won't be done.  */
6985  for (i = path_size - 1; i >= 0; i--)
6986    if (data->path[i].status != NOT_TAKEN)
6987      break;
6988
6989  if (i == -1)
6990    data->path_size = 0;
6991  else
6992    data->path_size = path_size;
6993
6994  /* End the current branch path.  */
6995  data->path[path_size].branch = 0;
6996}
6997
6998/* Perform cse on the instructions of a function.
6999   F is the first instruction.
7000   NREGS is one plus the highest pseudo-reg number used in the instruction.
7001
7002   AFTER_LOOP is 1 if this is the cse call done after loop optimization
7003   (only if -frerun-cse-after-loop).
7004
7005   Returns 1 if jump_optimize should be redone due to simplifications
7006   in conditional jump instructions.  */
7007
7008int
7009cse_main (f, nregs, after_loop, file)
7010     rtx f;
7011     int nregs;
7012     int after_loop;
7013     FILE *file;
7014{
7015  struct cse_basic_block_data val;
7016  rtx insn = f;
7017  int i;
7018
7019  cse_jumps_altered = 0;
7020  recorded_label_ref = 0;
7021  constant_pool_entries_cost = 0;
7022  val.path_size = 0;
7023
7024  init_recog ();
7025  init_alias_analysis ();
7026
7027  max_reg = nregs;
7028
7029  max_insn_uid = get_max_uid ();
7030
7031  reg_eqv_table = (struct reg_eqv_elem *)
7032    xmalloc (nregs * sizeof (struct reg_eqv_elem));
7033
7034#ifdef LOAD_EXTEND_OP
7035
7036  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
7037     and change the code and mode as appropriate.  */
7038  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7039#endif
7040
7041  /* Reset the counter indicating how many elements have been made
7042     thus far.  */
7043  n_elements_made = 0;
7044
7045  /* Find the largest uid.  */
7046
7047  max_uid = get_max_uid ();
7048  uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7049
7050  /* Compute the mapping from uids to cuids.
7051     CUIDs are numbers assigned to insns, like uids,
7052     except that cuids increase monotonically through the code.
7053     Don't assign cuids to line-number NOTEs, so that the distance in cuids
7054     between two insns is not affected by -g.  */
7055
7056  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7057    {
7058      if (GET_CODE (insn) != NOTE
7059	  || NOTE_LINE_NUMBER (insn) < 0)
7060	INSN_CUID (insn) = ++i;
7061      else
7062	/* Give a line number note the same cuid as preceding insn.  */
7063	INSN_CUID (insn) = i;
7064    }
7065
7066  ggc_push_context ();
7067
7068  /* Loop over basic blocks.
7069     Compute the maximum number of qty's needed for each basic block
7070     (which is 2 for each SET).  */
7071  insn = f;
7072  while (insn)
7073    {
7074      cse_altered = 0;
7075      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7076			      flag_cse_skip_blocks);
7077
7078      /* If this basic block was already processed or has no sets, skip it.  */
7079      if (val.nsets == 0 || GET_MODE (insn) == QImode)
7080	{
7081	  PUT_MODE (insn, VOIDmode);
7082	  insn = (val.last ? NEXT_INSN (val.last) : 0);
7083	  val.path_size = 0;
7084	  continue;
7085	}
7086
7087      cse_basic_block_start = val.low_cuid;
7088      cse_basic_block_end = val.high_cuid;
7089      max_qty = val.nsets * 2;
7090
7091      if (file)
7092	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7093		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7094		 val.nsets);
7095
7096      /* Make MAX_QTY bigger to give us room to optimize
7097	 past the end of this basic block, if that should prove useful.  */
7098      if (max_qty < 500)
7099	max_qty = 500;
7100
7101      max_qty += max_reg;
7102
7103      /* If this basic block is being extended by following certain jumps,
7104         (see `cse_end_of_basic_block'), we reprocess the code from the start.
7105         Otherwise, we start after this basic block.  */
7106      if (val.path_size > 0)
7107	cse_basic_block (insn, val.last, val.path, 0);
7108      else
7109	{
7110	  int old_cse_jumps_altered = cse_jumps_altered;
7111	  rtx temp;
7112
7113	  /* When cse changes a conditional jump to an unconditional
7114	     jump, we want to reprocess the block, since it will give
7115	     us a new branch path to investigate.  */
7116	  cse_jumps_altered = 0;
7117	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7118	  if (cse_jumps_altered == 0
7119	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7120	    insn = temp;
7121
7122	  cse_jumps_altered |= old_cse_jumps_altered;
7123	}
7124
7125      if (cse_altered)
7126	ggc_collect ();
7127
7128#ifdef USE_C_ALLOCA
7129      alloca (0);
7130#endif
7131    }
7132
7133  ggc_pop_context ();
7134
7135  if (max_elements_made < n_elements_made)
7136    max_elements_made = n_elements_made;
7137
7138  /* Clean up.  */
7139  end_alias_analysis ();
7140  free (uid_cuid);
7141  free (reg_eqv_table);
7142
7143  return cse_jumps_altered || recorded_label_ref;
7144}
7145
7146/* Process a single basic block.  FROM and TO are the limits of the basic
7147   block.  NEXT_BRANCH points to the branch path when following jumps or
7148   a null path when not following jumps.
7149
7150   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7151   loop.  This is true when we are being called for the last time on a
7152   block and this CSE pass is before loop.c.  */
7153
7154static rtx
7155cse_basic_block (from, to, next_branch, around_loop)
7156     rtx from, to;
7157     struct branch_path *next_branch;
7158     int around_loop;
7159{
7160  rtx insn;
7161  int to_usage = 0;
7162  rtx libcall_insn = NULL_RTX;
7163  int num_insns = 0;
7164
7165  /* This array is undefined before max_reg, so only allocate
7166     the space actually needed and adjust the start.  */
7167
7168  qty_table
7169    = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7170					 * sizeof (struct qty_table_elem));
7171  qty_table -= max_reg;
7172
7173  new_basic_block ();
7174
7175  /* TO might be a label.  If so, protect it from being deleted.  */
7176  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7177    ++LABEL_NUSES (to);
7178
7179  for (insn = from; insn != to; insn = NEXT_INSN (insn))
7180    {
7181      enum rtx_code code = GET_CODE (insn);
7182
7183      /* If we have processed 1,000 insns, flush the hash table to
7184	 avoid extreme quadratic behavior.  We must not include NOTEs
7185	 in the count since there may be more of them when generating
7186	 debugging information.  If we clear the table at different
7187	 times, code generated with -g -O might be different than code
7188	 generated with -O but not -g.
7189
7190	 ??? This is a real kludge and needs to be done some other way.
7191	 Perhaps for 2.9.  */
7192      if (code != NOTE && num_insns++ > 1000)
7193	{
7194	  flush_hash_table ();
7195	  num_insns = 0;
7196	}
7197
7198      /* See if this is a branch that is part of the path.  If so, and it is
7199	 to be taken, do so.  */
7200      if (next_branch->branch == insn)
7201	{
7202	  enum taken status = next_branch++->status;
7203	  if (status != NOT_TAKEN)
7204	    {
7205	      if (status == TAKEN)
7206		record_jump_equiv (insn, 1);
7207	      else
7208		invalidate_skipped_block (NEXT_INSN (insn));
7209
7210	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7211		 Then follow this branch.  */
7212#ifdef HAVE_cc0
7213	      prev_insn_cc0 = 0;
7214#endif
7215	      prev_insn = insn;
7216	      insn = JUMP_LABEL (insn);
7217	      continue;
7218	    }
7219	}
7220
7221      if (GET_MODE (insn) == QImode)
7222	PUT_MODE (insn, VOIDmode);
7223
7224      if (GET_RTX_CLASS (code) == 'i')
7225	{
7226	  rtx p;
7227
7228	  /* Process notes first so we have all notes in canonical forms when
7229	     looking for duplicate operations.  */
7230
7231	  if (REG_NOTES (insn))
7232	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7233
7234	  /* Track when we are inside a LIBCALL block.  Inside such a block,
7235	     we do not want to record destinations.  The last insn of a
7236	     LIBCALL block is not considered to be part of the block, since
7237	     its destination is the result of the block and hence should be
7238	     recorded.  */
7239
7240	  if (REG_NOTES (insn) != 0)
7241	    {
7242	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7243		libcall_insn = XEXP (p, 0);
7244	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7245		libcall_insn = 0;
7246	    }
7247
7248	  cse_insn (insn, libcall_insn);
7249
7250	  /* If we haven't already found an insn where we added a LABEL_REF,
7251	     check this one.  */
7252	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
7253	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7254			       (void *) insn))
7255	    recorded_label_ref = 1;
7256	}
7257
7258      /* If INSN is now an unconditional jump, skip to the end of our
7259	 basic block by pretending that we just did the last insn in the
7260	 basic block.  If we are jumping to the end of our block, show
7261	 that we can have one usage of TO.  */
7262
7263      if (any_uncondjump_p (insn))
7264	{
7265	  if (to == 0)
7266	    {
7267	      free (qty_table + max_reg);
7268	      return 0;
7269	    }
7270
7271	  if (JUMP_LABEL (insn) == to)
7272	    to_usage = 1;
7273
7274	  /* Maybe TO was deleted because the jump is unconditional.
7275	     If so, there is nothing left in this basic block.  */
7276	  /* ??? Perhaps it would be smarter to set TO
7277	     to whatever follows this insn,
7278	     and pretend the basic block had always ended here.  */
7279	  if (INSN_DELETED_P (to))
7280	    break;
7281
7282	  insn = PREV_INSN (to);
7283	}
7284
7285      /* See if it is ok to keep on going past the label
7286	 which used to end our basic block.  Remember that we incremented
7287	 the count of that label, so we decrement it here.  If we made
7288	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7289	 want to count the use in that jump.  */
7290
7291      if (to != 0 && NEXT_INSN (insn) == to
7292	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7293	{
7294	  struct cse_basic_block_data val;
7295	  rtx prev;
7296
7297	  insn = NEXT_INSN (to);
7298
7299	  /* If TO was the last insn in the function, we are done.  */
7300	  if (insn == 0)
7301	    {
7302	      free (qty_table + max_reg);
7303	      return 0;
7304	    }
7305
7306	  /* If TO was preceded by a BARRIER we are done with this block
7307	     because it has no continuation.  */
7308	  prev = prev_nonnote_insn (to);
7309	  if (prev && GET_CODE (prev) == BARRIER)
7310	    {
7311	      free (qty_table + max_reg);
7312	      return insn;
7313	    }
7314
7315	  /* Find the end of the following block.  Note that we won't be
7316	     following branches in this case.  */
7317	  to_usage = 0;
7318	  val.path_size = 0;
7319	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
7320
7321	  /* If the tables we allocated have enough space left
7322	     to handle all the SETs in the next basic block,
7323	     continue through it.  Otherwise, return,
7324	     and that block will be scanned individually.  */
7325	  if (val.nsets * 2 + next_qty > max_qty)
7326	    break;
7327
7328	  cse_basic_block_start = val.low_cuid;
7329	  cse_basic_block_end = val.high_cuid;
7330	  to = val.last;
7331
7332	  /* Prevent TO from being deleted if it is a label.  */
7333	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7334	    ++LABEL_NUSES (to);
7335
7336	  /* Back up so we process the first insn in the extension.  */
7337	  insn = PREV_INSN (insn);
7338	}
7339    }
7340
7341  if (next_qty > max_qty)
7342    abort ();
7343
7344  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7345     the previous insn is the only insn that branches to the head of a loop,
7346     we can cse into the loop.  Don't do this if we changed the jump
7347     structure of a loop unless we aren't going to be following jumps.  */
7348
7349  insn = prev_nonnote_insn (to);
7350  if ((cse_jumps_altered == 0
7351       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7352      && around_loop && to != 0
7353      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7354      && GET_CODE (insn) == JUMP_INSN
7355      && JUMP_LABEL (insn) != 0
7356      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7357    cse_around_loop (JUMP_LABEL (insn));
7358
7359  free (qty_table + max_reg);
7360
7361  return to ? NEXT_INSN (to) : 0;
7362}
7363
7364/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7365   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7366
7367static int
7368check_for_label_ref (rtl, data)
7369     rtx *rtl;
7370     void *data;
7371{
7372  rtx insn = (rtx) data;
7373
7374  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7375     we must rerun jump since it needs to place the note.  If this is a
7376     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7377     since no REG_LABEL will be added.  */
7378  return (GET_CODE (*rtl) == LABEL_REF
7379	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7380	  && LABEL_P (XEXP (*rtl, 0))
7381	  && INSN_UID (XEXP (*rtl, 0)) != 0
7382	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7383}
7384
7385/* Count the number of times registers are used (not set) in X.
7386   COUNTS is an array in which we accumulate the count, INCR is how much
7387   we count each register usage.
7388
7389   Don't count a usage of DEST, which is the SET_DEST of a SET which
7390   contains X in its SET_SRC.  This is because such a SET does not
7391   modify the liveness of DEST.  */
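
/* Thus in (set (reg 100) (plus (reg 100) (const_int 1))) (an invented
   increment), the use of (reg 100) in the source adds nothing to its
   count; if no other insn reads (reg 100), the increment is trivially
   dead.  */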
7392
7393static void
7394count_reg_usage (x, counts, dest, incr)
7395     rtx x;
7396     int *counts;
7397     rtx dest;
7398     int incr;
7399{
7400  enum rtx_code code;
7401  const char *fmt;
7402  int i, j;
7403
7404  if (x == 0)
7405    return;
7406
7407  switch (code = GET_CODE (x))
7408    {
7409    case REG:
7410      if (x != dest)
7411	counts[REGNO (x)] += incr;
7412      return;
7413
7414    case PC:
7415    case CC0:
7416    case CONST:
7417    case CONST_INT:
7418    case CONST_DOUBLE:
7419    case SYMBOL_REF:
7420    case LABEL_REF:
7421      return;
7422
7423    case CLOBBER:
7424      /* If we are clobbering a MEM, mark any registers inside the address
7425         as being used.  */
7426      if (GET_CODE (XEXP (x, 0)) == MEM)
7427	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7428      return;
7429
7430    case SET:
7431      /* Unless we are setting a REG, count everything in SET_DEST.  */
7432      if (GET_CODE (SET_DEST (x)) != REG)
7433	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7434
7435      /* If SRC has side-effects, then we can't delete this insn, so the
7436	 usage of SET_DEST inside SRC counts.
7437
7438	 ??? Strictly speaking, we might be preserving this insn
7439	 because some other SET has side-effects, but that's hard
7440	 to do and can't happen now.  */
7441      count_reg_usage (SET_SRC (x), counts,
7442		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7443		       incr);
7444      return;
7445
7446    case CALL_INSN:
7447      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7448      /* Fall through.  */
7449
7450    case INSN:
7451    case JUMP_INSN:
7452      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7453
7454      /* Things used in a REG_EQUAL note aren't dead since loop may try to
7455	 use them.  */
7456
7457      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7458      return;
7459
7460    case EXPR_LIST:
7461    case INSN_LIST:
7462      if (REG_NOTE_KIND (x) == REG_EQUAL
7463	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
7464	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7465      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7466      return;
7467
7468    default:
7469      break;
7470    }
7471
7472  fmt = GET_RTX_FORMAT (code);
7473  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7474    {
7475      if (fmt[i] == 'e')
7476	count_reg_usage (XEXP (x, i), counts, dest, incr);
7477      else if (fmt[i] == 'E')
7478	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7479	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7480    }
7481}
7482
7483/* Return true if SET is live.  */
7484static bool
7485set_live_p (set, insn, counts)
7486     rtx set;
7487     rtx insn ATTRIBUTE_UNUSED;	/* Only used with HAVE_cc0.  */
7488     int *counts;
7489{
7490#ifdef HAVE_cc0
7491  rtx tem;
7492#endif
7493
7494  if (set_noop_p (set))
7495    ;
7496
7497#ifdef HAVE_cc0
7498  else if (GET_CODE (SET_DEST (set)) == CC0
7499	   && !side_effects_p (SET_SRC (set))
7500	   && ((tem = next_nonnote_insn (insn)) == 0
7501	       || !INSN_P (tem)
7502	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7503    return false;
7504#endif
7505  else if (GET_CODE (SET_DEST (set)) != REG
7506	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7507	   || counts[REGNO (SET_DEST (set))] != 0
7508	   || side_effects_p (SET_SRC (set))
7509	   /* An ADDRESSOF expression can turn into a use of the
7510	      internal arg pointer, so always consider the
7511	      internal arg pointer live.  If it is truly dead,
7512	      flow will delete the initializing insn.  */
7513	   || (SET_DEST (set) == current_function_internal_arg_pointer))
7514    return true;
7515  return false;
7516}
7517
7518/* Return true if INSN is live.  */
7519
7520static bool
7521insn_live_p (insn, counts)
7522     rtx insn;
7523     int *counts;
7524{
7525  int i;
7526  if (GET_CODE (PATTERN (insn)) == SET)
7527    return set_live_p (PATTERN (insn), insn, counts);
7528  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7529    {
7530      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7531	{
7532	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7533
7534	  if (GET_CODE (elt) == SET)
7535	    {
7536	      if (set_live_p (elt, insn, counts))
7537		return true;
7538	    }
7539	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7540	    return true;
7541	}
7542      return false;
7543    }
7544  else
7545    return true;
7546}
7547
7548/* Return true if libcall is dead as a whole.  */
7549
7550static bool
7551dead_libcall_p (insn)
7552     rtx insn;
7553{
7554  rtx note;
7555  /* See if there's a REG_EQUAL note on this insn and try to
7556     replace the source with the REG_EQUAL expression.
7557
7558     We assume that insns with REG_RETVALs can only be reg->reg
7559     copies at this point.  */
7560  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7561  if (note)
7562    {
7563      rtx set = single_set (insn);
7564      rtx new = simplify_rtx (XEXP (note, 0));
7565
7566      if (!new)
7567	new = XEXP (note, 0);
7568
7569      if (set && validate_change (insn, &SET_SRC (set), new, 0))
7570	{
7571	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7572	  return true;
7573	}
7574    }
7575  return false;
7576}
7577
7578/* Scan all the insns and delete any that are dead; i.e., they store a register
7579   that is never used or they copy a register to itself.
7580
7581   This is used to remove insns made obviously dead by cse, loop or other
7582   optimizations.  It improves the heuristics in loop since it won't try to
7583   move dead invariants out of loops or make givs for dead quantities.  The
7584   remaining passes of the compilation are also sped up.  */
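
/* For instance, once (reg 260) (an invented pseudo) has a usage count
   of zero, an insn (set (reg 260) (reg 261)) is deleted and the count
   for (reg 261) is decremented, which may in turn make the insn that
   computed (reg 261) dead on this same backward scan.  */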
7585
7586void
7587delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
7588     rtx insns;
7589     int nreg;
7590     int preserve_basic_blocks;
7591{
7592  int *counts;
7593  rtx insn, prev;
7594  int i;
7595  int in_libcall = 0, dead_libcall = 0;
7596  basic_block bb;
7597
7598  /* First count the number of times each register is used.  */
7599  counts = (int *) xcalloc (nreg, sizeof (int));
7600  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7601    count_reg_usage (insn, counts, NULL_RTX, 1);
7602
7603  /* Go from the last insn to the first and delete insns that only set unused
7604     registers or copy a register to itself.  As we delete an insn, remove
7605     usage counts for registers it uses.
7606
7607     The first jump optimization pass may leave a real insn as the last
7608	     insn in the function.  We must not skip that insn or we may end
7609     up deleting code that is not really dead.  */
7610  insn = get_last_insn ();
7611  if (! INSN_P (insn))
7612    insn = prev_real_insn (insn);
7613
7614  if (!preserve_basic_blocks)
7615    for (; insn; insn = prev)
7616      {
7617	int live_insn = 0;
7618
7619	prev = prev_real_insn (insn);
7620
7621	/* Don't delete any insns that are part of a libcall block unless
7622	   we can delete the whole libcall block.
7623
7624	   Flow or loop might get confused if we did that.  Remember
7625	   that we are scanning backwards.  */
7626	if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7627	  {
7628	    in_libcall = 1;
7629	    live_insn = 1;
7630	    dead_libcall = dead_libcall_p (insn);
7631	  }
7632	else if (in_libcall)
7633	  live_insn = ! dead_libcall;
7634	else
7635	  live_insn = insn_live_p (insn, counts);
7636
7637	/* If this is a dead insn, delete it and show registers in it aren't
7638	   being used.  */
7639
7640	if (! live_insn)
7641	  {
7642	    count_reg_usage (insn, counts, NULL_RTX, -1);
7643	    delete_related_insns (insn);
7644	  }
7645
7646	if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7647	  {
7648	    in_libcall = 0;
7649	    dead_libcall = 0;
7650	  }
7651      }
7652  else
7653    for (i = 0; i < n_basic_blocks; i++)
7654      for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
7655	{
7656	  int live_insn = 0;
7657
7658	  prev = PREV_INSN (insn);
7659	  if (!INSN_P (insn))
7660	    continue;
7661
7662	  /* Don't delete any insns that are part of a libcall block unless
7663	     we can delete the whole libcall block.
7664
7665	     Flow or loop might get confused if we did that.  Remember
7666	     that we are scanning backwards.  */
7667	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7668	    {
7669	      in_libcall = 1;
7670	      live_insn = 1;
7671	      dead_libcall = dead_libcall_p (insn);
7672	    }
7673	  else if (in_libcall)
7674	    live_insn = ! dead_libcall;
7675	  else
7676	    live_insn = insn_live_p (insn, counts);
7677
7678	  /* If this is a dead insn, delete it and show registers in it aren't
7679	     being used.  */
7680
7681	  if (! live_insn)
7682	    {
7683	      count_reg_usage (insn, counts, NULL_RTX, -1);
7684	      delete_insn (insn);
7685	    }
7686
7687	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7688	    {
7689	      in_libcall = 0;
7690	      dead_libcall = 0;
7691	    }
7692	}
7693
7694  /* Clean up.  */
7695  free (counts);
7696}
7697