/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the mode of one of the
   registers' quantities differs from the mode of those expressions.

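   For example (with hypothetical registers): after scanning

	r5 = r3
	r6 = r3 + 1

   r5 is given r3's quantity number because the first insn is a plain
   register copy, while r6 is given a newly allocated quantity number
   because its value was produced some other way.
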
Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
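
   For example (hypothetical values): suppose (plus (reg 5) (const_int 1))
   is entered while reg_tick[5] is 3, so reg_in_table[5] is set to 3.  A
   later store into register 5 bumps reg_tick[5] to 4; the stale entry is
   not removed at once, but the mismatch 3 != 4 keeps it from matching
   until a new reference to register 5 forces the cleanup.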

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
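
/* An illustrative (hypothetical) example of the comparison members: after
   the taken branch of a test `r > 10', the quantity for r would record
   comparison_code = GT, comparison_const = (const_int 10) and
   comparison_qty = -1, so a later identical test can be recognized as
   already decided.  */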

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number; gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
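
/* As an illustration (hypothetical register number): REGHASH_FN (130)
   folds the high bits into the low ones, (130 ^ (130 >> 7)) & 127
   = (130 ^ 1) & 127 = 3, so pseudo 130 lands in bucket 3.  */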

/* The last lookup we did into the cse_reg_info hash table.  This allows us
   to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL note; if so, we must rerun jump after CSE to add
   the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

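/* As an illustration (hypothetical numbers): a pseudo register whose
   current quantity is 57 hashes to (((unsigned) REG << 7) + 57)
   & HASH_MASK whatever its register number is, so two pseudos known to
   hold the same quantity hash to the same bucket.  */
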
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) 			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost		PARAMS ((rtx, enum rtx_code));
static int approx_reg_cost_1	PARAMS ((rtx *, void *));
static int approx_reg_cost	PARAMS ((rtx));
static int preferrable		PARAMS ((int, int, int, int));
static void new_basic_block	PARAMS ((void));
static void make_new_qty	PARAMS ((unsigned int, enum machine_mode));
static void make_regs_eqv	PARAMS ((unsigned int, unsigned int));
static void delete_reg_equiv	PARAMS ((unsigned int));
static int mention_regs		PARAMS ((rtx));
static int insert_regs		PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table	PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup	PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate		PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p	PARAMS ((rtx, int));
static void remove_invalid_refs	PARAMS ((unsigned int));
static void remove_invalid_subreg_refs	PARAMS ((unsigned int, unsigned int,
						 enum machine_mode));
static void rehash_using_reg	PARAMS ((rtx));
static void invalidate_memory	PARAMS ((void));
static void invalidate_for_call	PARAMS ((void));
static rtx use_related_value	PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash	PARAMS ((rtx, enum machine_mode));
static unsigned canon_hash_string PARAMS ((const char *));
static unsigned safe_hash	PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p		PARAMS ((rtx, rtx, int, int));
static rtx canon_reg		PARAMS ((rtx, rtx));
static void find_best_addr	PARAMS ((rtx, rtx *, enum machine_mode));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx		PARAMS ((rtx, rtx));
static rtx equiv_constant	PARAMS ((rtx));
static void record_jump_equiv	PARAMS ((rtx, int));
static void record_jump_cond	PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx, int));
static void cse_insn		PARAMS ((rtx, rtx));
static int addr_affects_sp_p	PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes	PARAMS ((rtx, rtx));
static void cse_around_loop	PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop	PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block	PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PARAMS ((rtx, int *, rtx, int));
static int check_for_label_ref	PARAMS ((rtx *, void *));
extern void dump_class          PARAMS ((struct table_elt*));
static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
static int check_dependence	PARAMS ((rtx *, void *));

static void flush_hash_table	PARAMS ((void));
static bool insn_live_p		PARAMS ((rtx, int *));
static bool set_live_p		PARAMS ((rtx, rtx, int *));
static bool dead_libcall_p	PARAMS ((rtx));

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (xp, data)
     rtx *xp;
     void *data;
{
  rtx x = *xp;
  regset set = (regset) data;

  if (x && GET_CODE (x) == REG)
    SET_REGNO_REG_SET (set, REGNO (x));
  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (x)
     rtx x;
{
  regset_head set;
  int i;
  int cost = 0;
  int hardregs = 0;

  INIT_REG_SET (&set);
  for_each_rtx (&x, approx_reg_cost_1, (void *) &set);

  EXECUTE_IF_SET_IN_REG_SET
    (&set, 0, i,
     {
       if (! CHEAP_REGNO (i))
	 {
	   if (i < FIRST_PSEUDO_REGISTER)
	     hardregs++;

	   cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
	 }
     });

  CLEAR_REG_SET (&set);
  return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
}
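
/* As an illustration (hypothetical pseudos): for (plus (reg 100) (reg 101))
   the estimate is 2, one per distinct non-cheap pseudo.  Each distinct
   non-fixed hard register would add 2 instead, and on a
   SMALL_REGISTER_CLASSES target any such hard register turns the whole
   estimate into MAX_COST.  */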

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferrable (cost_a, regcost_a, cost_b, regcost_b)
     int cost_a, regcost_a, cost_b, regcost_b;
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
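
/* As an illustration (hypothetical costs): preferrable (4, MAX_COST, 6, 2)
   returns 1, preferring the nominally dearer B because A would tie up a
   scarce hard register, while preferrable (4, 2, 6, 2) returns -2 and A
   wins on plain operation cost.  */
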
797
798/* Internal function, to compute cost when X is not a register; called
799   from COST macro to keep it simple.  */
800
801static int
802notreg_cost (x, outer)
803     rtx x;
804     enum rtx_code outer;
805{
806  return ((GET_CODE (x) == SUBREG
807	   && GET_CODE (SUBREG_REG (x)) == REG
808	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
809	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
810	   && (GET_MODE_SIZE (GET_MODE (x))
811	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
812	   && subreg_lowpart_p (x)
813	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
814				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
815	  ? 0
816	  : rtx_cost (x, outer) * 2);
817}
818
819/* Return an estimate of the cost of computing rtx X.
820   One use is in cse, to decide which expression to keep in the hash table.
821   Another is in rtl generation, to pick the cheapest way to multiply.
822   Other uses like the latter are expected in the future.  */
823
824int
825rtx_cost (x, outer_code)
826     rtx x;
827     enum rtx_code outer_code ATTRIBUTE_UNUSED;
828{
829  int i, j;
830  enum rtx_code code;
831  const char *fmt;
832  int total;
833
834  if (x == 0)
835    return 0;
836
837  /* Compute the default costs of certain things.
838     Note that RTX_COSTS can override the defaults.  */
839
840  code = GET_CODE (x);
841  switch (code)
842    {
843    case MULT:
844      /* Count multiplication by 2**n as a shift,
845	 because if we are considering it, we would output it as a shift.  */
846      if (GET_CODE (XEXP (x, 1)) == CONST_INT
847	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
848	total = 2;
849      else
850	total = COSTS_N_INSNS (5);
851      break;
852    case DIV:
853    case UDIV:
854    case MOD:
855    case UMOD:
856      total = COSTS_N_INSNS (7);
857      break;
858    case USE:
859      /* Used in loop.c and combine.c as a marker.  */
860      total = 0;
861      break;
862    default:
863      total = COSTS_N_INSNS (1);
864    }
865
866  switch (code)
867    {
868    case REG:
869      return 0;
870
871    case SUBREG:
872      /* If we can't tie these modes, make this expensive.  The larger
873	 the mode, the more expensive it is.  */
874      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
875	return COSTS_N_INSNS (2
876			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
877      break;
878
879#ifdef RTX_COSTS
880      RTX_COSTS (x, code, outer_code);
881#endif
882#ifdef CONST_COSTS
883      CONST_COSTS (x, code, outer_code);
884#endif
885
886    default:
887#ifdef DEFAULT_RTX_COSTS
888      DEFAULT_RTX_COSTS (x, code, outer_code);
889#endif
890      break;
891    }
892
893  /* Sum the costs of the sub-rtx's, plus cost of this operation,
894     which is already in total.  */
895
896  fmt = GET_RTX_FORMAT (code);
897  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
898    if (fmt[i] == 'e')
899      total += rtx_cost (XEXP (x, i), code);
900    else if (fmt[i] == 'E')
901      for (j = 0; j < XVECLEN (x, i); j++)
902	total += rtx_cost (XVECEXP (x, i, j), code);
903
904  return total;
905}
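
/* As an illustration (hypothetical operands, default costs): for
   (mult (reg 100) (const_int 8)) the multiplication is counted as a
   shift, so the starting total is 2; for (mult (reg 100) (reg 101))
   it starts at COSTS_N_INSNS (5).  Operand costs are then added on
   top in either case.  */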

/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (x, mode)
     rtx x;
     enum machine_mode mode;
{
  /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate writing
     of ADDRESS_COST macro by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;
#ifdef ADDRESS_COST
  return ADDRESS_COST (x);
#else
  return rtx_cost (x, MEM);
#endif
}

static struct cse_reg_info *
get_cse_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}
981
982/* Clear the hash table and initialize each register with its own quantity,
983   for a new basic block.  */
984
985static void
986new_basic_block ()
987{
988  int i;
989
990  next_qty = max_reg;
991
992  /* Clear out hash table state for this pass.  */
993
994  memset ((char *) reg_hash, 0, sizeof reg_hash);
995
996  if (cse_reg_info_used_list)
997    {
998      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
999      cse_reg_info_free_list = cse_reg_info_used_list;
1000      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
1001    }
1002  cached_cse_reg_info = 0;
1003
1004  CLEAR_HARD_REG_SET (hard_regs_in_table);
1005
1006  /* The per-quantity values used to be initialized here, but it is
1007     much faster to initialize each as it is made in `make_new_qty'.  */
1008
1009  for (i = 0; i < HASH_SIZE; i++)
1010    {
1011      struct table_elt *first;
1012
1013      first = table[i];
1014      if (first != NULL)
1015	{
1016	  struct table_elt *last = first;
1017
1018	  table[i] = NULL;
1019
1020	  while (last->next_same_hash != NULL)
1021	    last = last->next_same_hash;
1022
1023	  /* Now relink this hash entire chain into
1024	     the free element list.  */
1025
1026	  last->next_same_hash = free_element_chain;
1027	  free_element_chain = first;
1028	}
1029    }
1030
1031  prev_insn = 0;
1032
1033#ifdef HAVE_cc0
1034  prev_insn_cc0 = 0;
1035#endif
1036}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     unsigned int reg;
     enum machine_mode mode;
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     unsigned int new, old;
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     unsigned int reg;
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1268
1269/* Update the register quantities for inserting X into the hash table
1270   with a value equivalent to CLASSP.
1271   (If the class does not contain a REG, it is irrelevant.)
1272   If MODIFIED is nonzero, X is a destination; it is being modified.
1273   Note that delete_reg_equiv should be called on a register
1274   before insert_regs is done on that register with MODIFIED != 0.
1275
1276   Nonzero value means that elements of reg_qty have changed
1277   so X's hash code may be different.  */
1278
1279static int
1280insert_regs (x, classp, modified)
1281     rtx x;
1282     struct table_elt *classp;
1283     int modified;
1284{
1285  if (GET_CODE (x) == REG)
1286    {
1287      unsigned int regno = REGNO (x);
1288      int qty_valid;
1289
1290      /* If REGNO is in the equivalence table already but is of the
1291	 wrong mode for that equivalence, don't do anything here.  */
1292
1293      qty_valid = REGNO_QTY_VALID_P (regno);
1294      if (qty_valid)
1295	{
1296	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1297
1298	  if (ent->mode != GET_MODE (x))
1299	    return 0;
1300	}
1301
1302      if (modified || ! qty_valid)
1303	{
1304	  if (classp)
1305	    for (classp = classp->first_same_value;
1306		 classp != 0;
1307		 classp = classp->next_same_value)
1308	      if (GET_CODE (classp->exp) == REG
1309		  && GET_MODE (classp->exp) == GET_MODE (x))
1310		{
1311		  make_regs_eqv (regno, REGNO (classp->exp));
1312		  return 1;
1313		}
1314
1315	  /* Mention_regs for a SUBREG checks whether REG_TICK is exactly one
1316	     larger than REG_IN_TABLE to find out whether the single preceding
1317	     invalidation was for the SUBREG alone or for the full register.
1318	     However, if we find here that REG_TICK
1319	     indicates that the register is invalid, it means that it has
1320	     been invalidated in a separate operation.  The SUBREG might be used
1321	     now (then this is a recursive call), or we might use the full REG
1322	     now and a SUBREG of it later.  So bump up REG_TICK so that
1323	     mention_regs will do the right thing.  */
1324	  if (! modified
1325	      && REG_IN_TABLE (regno) >= 0
1326	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1327	    REG_TICK (regno)++;
1328	  make_new_qty (regno, GET_MODE (x));
1329	  return 1;
1330	}
1331
1332      return 0;
1333    }
1334
1335  /* If X is a SUBREG, we will likely be inserting the inner register in the
1336     table.  If that register doesn't have an assigned quantity number at
1337     this point but does later, the insertion that we will be doing now will
1338     not be accessible because its hash code will have changed.  So assign
1339     a quantity number now.  */
1340
1341  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1342	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1343    {
1344      insert_regs (SUBREG_REG (x), NULL, 0);
1345      mention_regs (x);
1346      return 1;
1347    }
1348  else
1349    return mention_regs (x);
1350}
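
/* Illustrative example (not in the original source): for
   (subreg:QI (reg:SI 100) 0) where reg 100 has no quantity yet,
   insert_regs first assigns reg 100 a quantity through the recursive
   call, then records the SUBREG itself via mention_regs, so a later
   lookup of the SUBREG hashes consistently with the inner REG.  */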
1351
1352/* Look in or update the hash table.  */
1353
1354/* Remove table element ELT from use in the table.
1355   HASH is its hash code, made using the HASH macro.
1356   It's an argument because often that is known in advance
1357   and we save much time not recomputing it.  */
1358
1359static void
1360remove_from_table (elt, hash)
1361     struct table_elt *elt;
1362     unsigned hash;
1363{
1364  if (elt == 0)
1365    return;
1366
1367  /* Mark this element as removed.  See cse_insn.  */
1368  elt->first_same_value = 0;
1369
1370  /* Remove the table element from its equivalence class.  */
1371
1372  {
1373    struct table_elt *prev = elt->prev_same_value;
1374    struct table_elt *next = elt->next_same_value;
1375
1376    if (next)
1377      next->prev_same_value = prev;
1378
1379    if (prev)
1380      prev->next_same_value = next;
1381    else
1382      {
1383	struct table_elt *newfirst = next;
1384	while (next)
1385	  {
1386	    next->first_same_value = newfirst;
1387	    next = next->next_same_value;
1388	  }
1389      }
1390  }
1391
1392  /* Remove the table element from its hash bucket.  */
1393
1394  {
1395    struct table_elt *prev = elt->prev_same_hash;
1396    struct table_elt *next = elt->next_same_hash;
1397
1398    if (next)
1399      next->prev_same_hash = prev;
1400
1401    if (prev)
1402      prev->next_same_hash = next;
1403    else if (table[hash] == elt)
1404      table[hash] = next;
1405    else
1406      {
1407	/* This entry is not in the proper hash bucket.  This can happen
1408	   when two classes were merged by `merge_equiv_classes'.  Search
1409	   for the hash bucket that it heads.  This happens only very
1410	   rarely, so the cost is acceptable.  */
1411	for (hash = 0; hash < HASH_SIZE; hash++)
1412	  if (table[hash] == elt)
1413	    table[hash] = next;
1414      }
1415  }
1416
1417  /* Remove the table element from its related-value circular chain.  */
1418
1419  if (elt->related_value != 0 && elt->related_value != elt)
1420    {
1421      struct table_elt *p = elt->related_value;
1422
1423      while (p->related_value != elt)
1424	p = p->related_value;
1425      p->related_value = elt->related_value;
1426      if (p->related_value == p)
1427	p->related_value = 0;
1428    }
1429
1430  /* Now add it to the free element chain.  */
1431  elt->next_same_hash = free_element_chain;
1432  free_element_chain = elt;
1433}
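
/* Illustrative example (not in the original source): if the
   related-value ring is A -> B -> C -> A and B is removed, the loop
   above walks from B's successor until it finds A (the element whose
   related_value is B) and splices it to C, leaving A -> C -> A.  If
   the ring then contains only one element, its related_value is
   cleared to 0.  */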
1434
1435/* Look up X in the hash table and return its table element,
1436   or 0 if X is not in the table.
1437
1438   MODE is the machine-mode of X, or if X is an integer constant
1439   with VOIDmode then MODE is the mode with which X will be used.
1440
1441   Here we are satisfied to find an expression whose tree structure
1442   looks like X.  */
1443
1444static struct table_elt *
1445lookup (x, hash, mode)
1446     rtx x;
1447     unsigned hash;
1448     enum machine_mode mode;
1449{
1450  struct table_elt *p;
1451
1452  for (p = table[hash]; p; p = p->next_same_hash)
1453    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1454			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1455      return p;
1456
1457  return 0;
1458}
1459
1460/* Like `lookup' but don't care whether the table element uses invalid regs.
1461   Also ignore discrepancies in the machine mode of a register.  */
1462
1463static struct table_elt *
1464lookup_for_remove (x, hash, mode)
1465     rtx x;
1466     unsigned hash;
1467     enum machine_mode mode;
1468{
1469  struct table_elt *p;
1470
1471  if (GET_CODE (x) == REG)
1472    {
1473      unsigned int regno = REGNO (x);
1474
1475      /* Don't check the machine mode when comparing registers;
1476	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1477      for (p = table[hash]; p; p = p->next_same_hash)
1478	if (GET_CODE (p->exp) == REG
1479	    && REGNO (p->exp) == regno)
1480	  return p;
1481    }
1482  else
1483    {
1484      for (p = table[hash]; p; p = p->next_same_hash)
1485	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1486	  return p;
1487    }
1488
1489  return 0;
1490}
1491
1492/* Look for an expression equivalent to X and with code CODE.
1493   If one is found, return that expression.  */
1494
1495static rtx
1496lookup_as_function (x, code)
1497     rtx x;
1498     enum rtx_code code;
1499{
1500  struct table_elt *p
1501    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1502
1503  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1504     long as we are narrowing.  So if we looked in vain for a mode narrower
1505     than word_mode before, look for word_mode now.  */
1506  if (p == 0 && code == CONST_INT
1507      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1508    {
1509      x = copy_rtx (x);
1510      PUT_MODE (x, word_mode);
1511      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1512    }
1513
1514  if (p == 0)
1515    return 0;
1516
1517  for (p = p->first_same_value; p; p = p->next_same_value)
1518    if (GET_CODE (p->exp) == code
1519	/* Make sure this is a valid entry in the table.  */
1520	&& exp_equiv_p (p->exp, p->exp, 1, 0))
1521      return p->exp;
1522
1523  return 0;
1524}
1525
1526/* Insert X in the hash table, assuming HASH is its hash code
1527   and CLASSP is an element of the class it should go in
1528   (or 0 if a new class should be made).
1529   It is inserted at the proper position to keep the class in
1530   the order cheapest first.
1531
1532   MODE is the machine-mode of X, or if X is an integer constant
1533   with VOIDmode then MODE is the mode with which X will be used.
1534
1535   For elements of equal cheapness, the most recent one
1536   goes in front, except that the first element in the list
1537   remains first unless a cheaper element is added.  The order of
1538   pseudo-registers does not matter, as canon_reg will be called to
1539   find the cheapest when a register is retrieved from the table.
1540
1541   The in_memory field in the hash table element is set to 0.
1542   The caller must set it nonzero if appropriate.
1543
1544   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1545   and if insert_regs returns a nonzero value
1546   you must then recompute its hash code before calling here.
1547
1548   If necessary, update table showing constant values of quantities.  */
1549
1550#define CHEAPER(X, Y) \
1551 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
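
/* Illustrative example (not in the original source): assuming
   preferrable compares `cost' first and uses `regcost' only as a
   tiebreaker, an element with cost 2 and regcost 1 is CHEAPER than
   one with cost 2 and regcost 3, so insert below places it nearer
   the head of its equivalence class.  */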
1552
1553static struct table_elt *
1554insert (x, classp, hash, mode)
1555     rtx x;
1556     struct table_elt *classp;
1557     unsigned hash;
1558     enum machine_mode mode;
1559{
1560  struct table_elt *elt;
1561
1562  /* If X is a register and we haven't made a quantity for it,
1563     something is wrong.  */
1564  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1565    abort ();
1566
1567  /* If X is a hard register, show it is being put in the table.  */
1568  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1569    {
1570      unsigned int regno = REGNO (x);
1571      unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1572      unsigned int i;
1573
1574      for (i = regno; i < endregno; i++)
1575	SET_HARD_REG_BIT (hard_regs_in_table, i);
1576    }
1577
1578  /* Put an element for X into the right hash bucket.  */
1579
1580  elt = free_element_chain;
1581  if (elt)
1582    free_element_chain = elt->next_same_hash;
1583  else
1584    {
1585      n_elements_made++;
1586      elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1587    }
1588
1589  elt->exp = x;
1590  elt->canon_exp = NULL_RTX;
1591  elt->cost = COST (x);
1592  elt->regcost = approx_reg_cost (x);
1593  elt->next_same_value = 0;
1594  elt->prev_same_value = 0;
1595  elt->next_same_hash = table[hash];
1596  elt->prev_same_hash = 0;
1597  elt->related_value = 0;
1598  elt->in_memory = 0;
1599  elt->mode = mode;
1600  elt->is_const = (CONSTANT_P (x)
1601		   /* GNU C++ takes advantage of this for `this'
1602		      (and other const values).  */
1603		   || (RTX_UNCHANGING_P (x)
1604		       && GET_CODE (x) == REG
1605		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1606		   || FIXED_BASE_PLUS_P (x));
1607
1608  if (table[hash])
1609    table[hash]->prev_same_hash = elt;
1610  table[hash] = elt;
1611
1612  /* Put it into the proper value-class.  */
1613  if (classp)
1614    {
1615      classp = classp->first_same_value;
1616      if (CHEAPER (elt, classp))
1617	/* Insert at the head of the class */
1618	{
1619	  struct table_elt *p;
1620	  elt->next_same_value = classp;
1621	  classp->prev_same_value = elt;
1622	  elt->first_same_value = elt;
1623
1624	  for (p = classp; p; p = p->next_same_value)
1625	    p->first_same_value = elt;
1626	}
1627      else
1628	{
1629	  /* Insert not at head of the class.  */
1630	  /* Put it after the last element cheaper than X.  */
1631	  struct table_elt *p, *next;
1632
1633	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1634	       p = next);
1635
1636	  /* Put it after P and before NEXT.  */
1637	  elt->next_same_value = next;
1638	  if (next)
1639	    next->prev_same_value = elt;
1640
1641	  elt->prev_same_value = p;
1642	  p->next_same_value = elt;
1643	  elt->first_same_value = classp;
1644	}
1645    }
1646  else
1647    elt->first_same_value = elt;
1648
1649  /* If this is a constant being set equivalent to a register or a register
1650     being set equivalent to a constant, note the constant equivalence.
1651
1652     If this is a constant, it cannot be equivalent to a different constant,
1653     and a constant is the only thing that can be cheaper than a register.  So
1654     we know the register is the head of the class (before the constant was
1655     inserted).
1656
1657     If this is a register that is not already known equivalent to a
1658     constant, we must check the entire class.
1659
1660     If this is a register that is already known equivalent to an insn,
1661     update the qtys `const_insn' to show that `this_insn' is the latest
1662     insn making that quantity equivalent to the constant.  */
1663
1664  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1665      && GET_CODE (x) != REG)
1666    {
1667      int exp_q = REG_QTY (REGNO (classp->exp));
1668      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1669
1670      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1671      exp_ent->const_insn = this_insn;
1672    }
1673
1674  else if (GET_CODE (x) == REG
1675	   && classp
1676	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1677	   && ! elt->is_const)
1678    {
1679      struct table_elt *p;
1680
1681      for (p = classp; p != 0; p = p->next_same_value)
1682	{
1683	  if (p->is_const && GET_CODE (p->exp) != REG)
1684	    {
1685	      int x_q = REG_QTY (REGNO (x));
1686	      struct qty_table_elem *x_ent = &qty_table[x_q];
1687
1688	      x_ent->const_rtx
1689		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
1690	      x_ent->const_insn = this_insn;
1691	      break;
1692	    }
1693	}
1694    }
1695
1696  else if (GET_CODE (x) == REG
1697	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1698	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1699    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1700
1701  /* If this is a constant with symbolic value,
1702     and it has a term with an explicit integer value,
1703     link it up with related expressions.  */
1704  if (GET_CODE (x) == CONST)
1705    {
1706      rtx subexp = get_related_value (x);
1707      unsigned subhash;
1708      struct table_elt *subelt, *subelt_prev;
1709
1710      if (subexp != 0)
1711	{
1712	  /* Get the integer-free subexpression in the hash table.  */
1713	  subhash = safe_hash (subexp, mode) & HASH_MASK;
1714	  subelt = lookup (subexp, subhash, mode);
1715	  if (subelt == 0)
1716	    subelt = insert (subexp, NULL, subhash, mode);
1717	  /* Initialize SUBELT's circular chain if it has none.  */
1718	  if (subelt->related_value == 0)
1719	    subelt->related_value = subelt;
1720	  /* Find the element in the circular chain that precedes SUBELT.  */
1721	  subelt_prev = subelt;
1722	  while (subelt_prev->related_value != subelt)
1723	    subelt_prev = subelt_prev->related_value;
1724	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1725	     This way the element that follows SUBELT is the oldest one.  */
1726	  elt->related_value = subelt_prev->related_value;
1727	  subelt_prev->related_value = elt;
1728	}
1729    }
1730
1731  return elt;
1732}
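
/* Illustrative example (not in the original source): inserting
   (const (plus (symbol_ref "x") (const_int 8))) looks up the
   integer-free subexpression (symbol_ref "x") and links the new
   element into that entry's circular related_value chain, so that
   use_related_value can later rewrite the constant as a register
   holding a related address plus an offset.  */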
1733
1734/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1735   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1736   the two classes equivalent.
1737
1738   CLASS1 will be the surviving class; CLASS2 should not be used after this
1739   call.
1740
1741   Any invalid entries in CLASS2 will not be copied.  */
1742
1743static void
1744merge_equiv_classes (class1, class2)
1745     struct table_elt *class1, *class2;
1746{
1747  struct table_elt *elt, *next, *new;
1748
1749  /* Ensure we start with the head of the classes.  */
1750  class1 = class1->first_same_value;
1751  class2 = class2->first_same_value;
1752
1753  /* If they were already equal, forget it.  */
1754  if (class1 == class2)
1755    return;
1756
1757  for (elt = class2; elt; elt = next)
1758    {
1759      unsigned int hash;
1760      rtx exp = elt->exp;
1761      enum machine_mode mode = elt->mode;
1762
1763      next = elt->next_same_value;
1764
1765      /* Remove old entry, make a new one in CLASS1's class.
1766	 Don't do this for invalid entries as we cannot find their
1767	 hash code (it also isn't necessary).  */
1768      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1769	{
1770	  hash_arg_in_memory = 0;
1771	  hash = HASH (exp, mode);
1772
1773	  if (GET_CODE (exp) == REG)
1774	    delete_reg_equiv (REGNO (exp));
1775
1776	  remove_from_table (elt, hash);
1777
1778	  if (insert_regs (exp, class1, 0))
1779	    {
1780	      rehash_using_reg (exp);
1781	      hash = HASH (exp, mode);
1782	    }
1783	  new = insert (exp, class1, hash, mode);
1784	  new->in_memory = hash_arg_in_memory;
1785	}
1786    }
1787}
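
/* Illustrative note (not in the original source): each valid element
   of CLASS2 is deleted and re-inserted above, so it is re-sorted by
   cost within CLASS1 and, when insert_regs had to assign new quantity
   numbers, rehashed under its new hash code.  */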
1788
1789/* Flush the entire hash table.  */
1790
1791static void
1792flush_hash_table ()
1793{
1794  int i;
1795  struct table_elt *p;
1796
1797  for (i = 0; i < HASH_SIZE; i++)
1798    for (p = table[i]; p; p = table[i])
1799      {
1800	/* Note that invalidate can remove elements
1801	   after P in the current hash chain.  */
1802	if (GET_CODE (p->exp) == REG)
1803	  invalidate (p->exp, p->mode);
1804	else
1805	  remove_from_table (p, i);
1806      }
1807}
1808
1809	/* Function called for each rtx to check whether a true dependence exists.  */
1810struct check_dependence_data
1811{
1812  enum machine_mode mode;
1813  rtx exp;
1814};
1815
1816static int
1817check_dependence (x, data)
1818     rtx *x;
1819     void *data;
1820{
1821  struct check_dependence_data *d = (struct check_dependence_data *) data;
1822  if (*x && GET_CODE (*x) == MEM)
1823    return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1824  else
1825    return 0;
1826}
1827
1828/* Remove from the hash table, or mark as invalid, all expressions whose
1829   values could be altered by storing in X.  X is a register, a subreg, or
1830   a memory reference with nonvarying address (because, when a memory
1831   reference with a varying address is stored in, all memory references are
1832   removed by invalidate_memory so specific invalidation is superfluous).
1833   FULL_MODE, if not VOIDmode, indicates that this much should be
1834   invalidated instead of just the amount indicated by the mode of X.  This
1835   is only used for bitfield stores into memory.
1836
1837   A nonvarying address may be just a register or just a symbol reference,
1838   or it may be either of those plus a numeric offset.  */
1839
1840static void
1841invalidate (x, full_mode)
1842     rtx x;
1843     enum machine_mode full_mode;
1844{
1845  int i;
1846  struct table_elt *p;
1847
1848  switch (GET_CODE (x))
1849    {
1850    case REG:
1851      {
1852	/* If X is a register, dependencies on its contents are recorded
1853	   through the qty number mechanism.  Just change the qty number of
1854	   the register, mark it as invalid for expressions that refer to it,
1855	   and remove it itself.  */
1856	unsigned int regno = REGNO (x);
1857	unsigned int hash = HASH (x, GET_MODE (x));
1858
1859	/* Remove REGNO from any quantity list it might be on and indicate
1860	   that its value might have changed.  If it is a pseudo, remove its
1861	   entry from the hash table.
1862
1863	   For a hard register, we do the first two actions above for any
1864	   additional hard registers corresponding to X.  Then, if any of these
1865	   registers are in the table, we must remove any REG entries that
1866	   overlap these registers.  */
1867
1868	delete_reg_equiv (regno);
1869	REG_TICK (regno)++;
1870
1871	if (regno >= FIRST_PSEUDO_REGISTER)
1872	  {
1873	    /* Because a register can be referenced in more than one mode,
1874	       we might have to remove more than one table entry.  */
1875	    struct table_elt *elt;
1876
1877	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1878	      remove_from_table (elt, hash);
1879	  }
1880	else
1881	  {
1882	    HOST_WIDE_INT in_table
1883	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1884	    unsigned int endregno
1885	      = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1886	    unsigned int tregno, tendregno, rn;
1887	    struct table_elt *p, *next;
1888
1889	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1890
1891	    for (rn = regno + 1; rn < endregno; rn++)
1892	      {
1893		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1894		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1895		delete_reg_equiv (rn);
1896		REG_TICK (rn)++;
1897	      }
1898
1899	    if (in_table)
1900	      for (hash = 0; hash < HASH_SIZE; hash++)
1901		for (p = table[hash]; p; p = next)
1902		  {
1903		    next = p->next_same_hash;
1904
1905		    if (GET_CODE (p->exp) != REG
1906			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1907		      continue;
1908
1909		    tregno = REGNO (p->exp);
1910		    tendregno
1911		      = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1912		    if (tendregno > regno && tregno < endregno)
1913		      remove_from_table (p, hash);
1914		  }
1915	  }
1916      }
1917      return;
1918
1919    case SUBREG:
1920      invalidate (SUBREG_REG (x), VOIDmode);
1921      return;
1922
1923    case PARALLEL:
1924      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1925	invalidate (XVECEXP (x, 0, i), VOIDmode);
1926      return;
1927
1928    case EXPR_LIST:
1929      /* This is part of a disjoint return value; extract the location in
1930	 question ignoring the offset.  */
1931      invalidate (XEXP (x, 0), VOIDmode);
1932      return;
1933
1934    case MEM:
1935      /* Calculate the canonical version of X here so that
1936	 true_dependence doesn't generate new RTL for X on each call.  */
1937      x = canon_rtx (x);
1938
1939      /* Remove all hash table elements that refer to overlapping pieces of
1940	 memory.  */
1941      if (full_mode == VOIDmode)
1942	full_mode = GET_MODE (x);
1943
1944      for (i = 0; i < HASH_SIZE; i++)
1945	{
1946	  struct table_elt *next;
1947
1948	  for (p = table[i]; p; p = next)
1949	    {
1950	      next = p->next_same_hash;
1951	      if (p->in_memory)
1952		{
1953		  struct check_dependence_data d;
1954
1955		  /* Just canonicalize the expression once;
1956		     otherwise each time we call invalidate
1957		     true_dependence will canonicalize the
1958		     expression again.  */
1959		  if (!p->canon_exp)
1960		    p->canon_exp = canon_rtx (p->exp);
1961		  d.exp = x;
1962		  d.mode = full_mode;
1963		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1964		    remove_from_table (p, i);
1965		}
1966	    }
1967	}
1968      return;
1969
1970    default:
1971      abort ();
1972    }
1973}
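
/* Illustrative example (not in the original source): on a target
   where DImode spans two hard registers, invalidating (reg:DI 0)
   deletes the quantities of regs 0 and 1, bumps both of their ticks,
   and removes any table entry for a hard REG that overlaps them.  */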
1974
1975/* Remove all expressions that refer to register REGNO,
1976   since they are already invalid, and we are about to
1977   mark that register valid again and don't want the old
1978   expressions to reappear as valid.  */
1979
1980static void
1981remove_invalid_refs (regno)
1982     unsigned int regno;
1983{
1984  unsigned int i;
1985  struct table_elt *p, *next;
1986
1987  for (i = 0; i < HASH_SIZE; i++)
1988    for (p = table[i]; p; p = next)
1989      {
1990	next = p->next_same_hash;
1991	if (GET_CODE (p->exp) != REG
1992	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
1993	  remove_from_table (p, i);
1994      }
1995}
1996
1997/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1998   and mode MODE.  */
1999static void
2000remove_invalid_subreg_refs (regno, offset, mode)
2001     unsigned int regno;
2002     unsigned int offset;
2003     enum machine_mode mode;
2004{
2005  unsigned int i;
2006  struct table_elt *p, *next;
2007  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2008
2009  for (i = 0; i < HASH_SIZE; i++)
2010    for (p = table[i]; p; p = next)
2011      {
2012	rtx exp = p->exp;
2013	next = p->next_same_hash;
2014
2015	if (GET_CODE (exp) != REG
2016	    && (GET_CODE (exp) != SUBREG
2017		|| GET_CODE (SUBREG_REG (exp)) != REG
2018		|| REGNO (SUBREG_REG (exp)) != regno
2019		|| (((SUBREG_BYTE (exp)
2020		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2021		    && SUBREG_BYTE (exp) <= end))
2022	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
2023	  remove_from_table (p, i);
2024      }
2025}
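
/* Illustrative example (not in the original source), on a target
   where HImode is two bytes wide: after invalidating
   (subreg:HI (reg:SI 100) 0) (OFFSET 0, so END is 1), an entry for
   (subreg:HI (reg:SI 100) 2) does not overlap the byte range and
   survives, while non-REG entries that mention reg 100 in any other
   way are removed.  */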
2026
2027/* Recompute the hash codes of any valid entries in the hash table that
2028   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2029
2030   This is called when we make a jump equivalence.  */
2031
2032static void
2033rehash_using_reg (x)
2034     rtx x;
2035{
2036  unsigned int i;
2037  struct table_elt *p, *next;
2038  unsigned hash;
2039
2040  if (GET_CODE (x) == SUBREG)
2041    x = SUBREG_REG (x);
2042
2043  /* If X is not a register or if the register is known not to be in any
2044     valid entries in the table, we have no work to do.  */
2045
2046  if (GET_CODE (x) != REG
2047      || REG_IN_TABLE (REGNO (x)) < 0
2048      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2049    return;
2050
2051  /* Scan all hash chains looking for valid entries that mention X.
2052     If we find one and it is in the wrong hash chain, move it.  We can skip
2053     objects that are registers, since they are handled specially.  */
2054
2055  for (i = 0; i < HASH_SIZE; i++)
2056    for (p = table[i]; p; p = next)
2057      {
2058	next = p->next_same_hash;
2059	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2060	    && exp_equiv_p (p->exp, p->exp, 1, 0)
2061	    && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2062	  {
2063	    if (p->next_same_hash)
2064	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2065
2066	    if (p->prev_same_hash)
2067	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2068	    else
2069	      table[i] = p->next_same_hash;
2070
2071	    p->next_same_hash = table[hash];
2072	    p->prev_same_hash = 0;
2073	    if (table[hash])
2074	      table[hash]->prev_same_hash = p;
2075	    table[hash] = p;
2076	  }
2077      }
2078}
2079
2080/* Remove from the hash table any expression that is a call-clobbered
2081   register.  Also update their TICK values.  */
2082
2083static void
2084invalidate_for_call ()
2085{
2086  unsigned int regno, endregno;
2087  unsigned int i;
2088  unsigned hash;
2089  struct table_elt *p, *next;
2090  int in_table = 0;
2091
2092  /* Go through all the hard registers.  For each that is clobbered in
2093     a CALL_INSN, remove the register from quantity chains and update
2094     reg_tick if defined.  Also see if any of these registers is currently
2095     in the table.  */
2096
2097  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2098    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2099      {
2100	delete_reg_equiv (regno);
2101	if (REG_TICK (regno) >= 0)
2102	  REG_TICK (regno)++;
2103
2104	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2105      }
2106
2107  /* In the case where we have no call-clobbered hard registers in the
2108     table, we are done.  Otherwise, scan the table and remove any
2109     entry that overlaps a call-clobbered register.  */
2110
2111  if (in_table)
2112    for (hash = 0; hash < HASH_SIZE; hash++)
2113      for (p = table[hash]; p; p = next)
2114	{
2115	  next = p->next_same_hash;
2116
2117	  if (GET_CODE (p->exp) != REG
2118	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2119	    continue;
2120
2121	  regno = REGNO (p->exp);
2122	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2123
2124	  for (i = regno; i < endregno; i++)
2125	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2126	      {
2127		remove_from_table (p, hash);
2128		break;
2129	      }
2130	}
2131}
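
/* Illustrative note (not in the original source): after this runs for
   a call, every register in regs_invalidated_by_call has lost its
   quantity and had its tick bumped, so stale equivalences cannot
   reappear as valid; the full table scan happens only if one of those
   hard registers was actually recorded in the table.  */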
2132
2133/* Given an expression X of type CONST,
2134   and ELT which is its table entry (or 0 if it
2135   is not in the hash table),
2136   return an alternate expression for X as a register plus integer.
2137   If none can be found, return 0.  */
2138
2139static rtx
2140use_related_value (x, elt)
2141     rtx x;
2142     struct table_elt *elt;
2143{
2144  struct table_elt *relt = 0;
2145  struct table_elt *p, *q;
2146  HOST_WIDE_INT offset;
2147
2148  /* First, is there anything related known?
2149     If we have a table element, we can tell from that.
2150	     Otherwise, we must look it up.  */
2151
2152  if (elt != 0 && elt->related_value != 0)
2153    relt = elt;
2154  else if (elt == 0 && GET_CODE (x) == CONST)
2155    {
2156      rtx subexp = get_related_value (x);
2157      if (subexp != 0)
2158	relt = lookup (subexp,
2159		       safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2160		       GET_MODE (subexp));
2161    }
2162
2163  if (relt == 0)
2164    return 0;
2165
2166  /* Search all related table entries for one that has an
2167     equivalent register.  */
2168
2169  p = relt;
2170  while (1)
2171    {
2172      /* This loop is strange in that it is executed in two different cases.
2173	 The first is when X is already in the table.  Then it is searching
2174	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2175	 X is not in the table.  Then RELT points to a class for the related
2176	 value.
2177
2178	 Ensure that, whatever case we are in, we ignore classes that have
2179	 the same value as X.  */
2180
2181      if (rtx_equal_p (x, p->exp))
2182	q = 0;
2183      else
2184	for (q = p->first_same_value; q; q = q->next_same_value)
2185	  if (GET_CODE (q->exp) == REG)
2186	    break;
2187
2188      if (q)
2189	break;
2190
2191      p = p->related_value;
2192
2193      /* We went all the way around, so there is nothing to be found.
2194	 Alternatively, perhaps RELT was in the table for some other reason
2195	 and it has no related values recorded.  */
2196      if (p == relt || p == 0)
2197	break;
2198    }
2199
2200  if (q == 0)
2201    return 0;
2202
2203  offset = (get_integer_term (x) - get_integer_term (p->exp));
2204	  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2205  return plus_constant (q->exp, offset);
2206}
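
/* Illustrative example (not in the original source): if X is
   (const (plus (symbol_ref "s") (const_int 12))) and the table knows
   that (reg 50) holds (const (plus (symbol_ref "s") (const_int 4))),
   the offset computed above is 8 and the result is
   (plus (reg 50) (const_int 8)).  */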
2207
2208/* Hash a string.  Just add its bytes up.  */
2209static inline unsigned
2210canon_hash_string (ps)
2211     const char *ps;
2212{
2213  unsigned hash = 0;
2214  const unsigned char *p = (const unsigned char *)ps;
2215
2216  if (p)
2217    while (*p)
2218      hash += *p++;
2219
2220  return hash;
2221}
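
/* For example (illustrative, not in the original source):
   canon_hash_string ("ab") yields 'a' + 'b' == 195, and a null
   pointer hashes to 0.  */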
2222
2223/* Hash an rtx.  We are careful to make sure the value is never negative.
2224   Equivalent registers hash identically.
2225   MODE is used in hashing for CONST_INTs only;
2226   otherwise the mode of X is used.
2227
2228   Store 1 in do_not_record if any subexpression is volatile.
2229
2230   Store 1 in hash_arg_in_memory if X contains a MEM rtx
2231   which does not have the RTX_UNCHANGING_P bit set.
2232
2233   Note that cse_insn knows that the hash code of a MEM expression
2234   is just (int) MEM plus the hash code of the address.  */
2235
2236static unsigned
2237canon_hash (x, mode)
2238     rtx x;
2239     enum machine_mode mode;
2240{
2241  int i, j;
2242  unsigned hash = 0;
2243  enum rtx_code code;
2244  const char *fmt;
2245
2246  /* repeat is used to turn tail-recursion into iteration.  */
2247 repeat:
2248  if (x == 0)
2249    return hash;
2250
2251  code = GET_CODE (x);
2252  switch (code)
2253    {
2254    case REG:
2255      {
2256	unsigned int regno = REGNO (x);
2257
2258	/* On some machines, we can't record any non-fixed hard register,
2259	   because extending its life will cause reload problems.  We
2260	   consider ap, fp, and sp to be fixed for this purpose.
2261
2262	   We also consider CCmode registers to be fixed for this purpose;
2263	   failure to do so leads to failure to simplify 0<100 type of
2264	   conditionals.
2265
2266	   On all machines, we can't record any global registers.
2267	   Nor should we record any register that is in a small
2268	   class, as defined by CLASS_LIKELY_SPILLED_P.  */
2269
2270	if (regno < FIRST_PSEUDO_REGISTER
2271	    && (global_regs[regno]
2272		|| CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno))
2273		|| (SMALL_REGISTER_CLASSES
2274		    && ! fixed_regs[regno]
2275		    && x != frame_pointer_rtx
2276		    && x != hard_frame_pointer_rtx
2277		    && x != arg_pointer_rtx
2278		    && x != stack_pointer_rtx
2279		    && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2280	  {
2281	    do_not_record = 1;
2282	    return 0;
2283	  }
2284
2285	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2286	return hash;
2287      }
2288
2289    /* We handle SUBREG of a REG specially because the underlying
2290       reg changes its hash value with every value change; we don't
2291       want to have to forget unrelated subregs when one subreg changes.  */
2292    case SUBREG:
2293      {
2294	if (GET_CODE (SUBREG_REG (x)) == REG)
2295	  {
2296	    hash += (((unsigned) SUBREG << 7)
2297		     + REGNO (SUBREG_REG (x))
2298		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2299	    return hash;
2300	  }
2301	break;
2302      }
2303
2304    case CONST_INT:
2305      {
2306	unsigned HOST_WIDE_INT tem = INTVAL (x);
2307	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2308	return hash;
2309      }
2310
2311    case CONST_DOUBLE:
2312      /* This is like the general case, except that it only counts
2313	 the integers representing the constant.  */
2314      hash += (unsigned) code + (unsigned) GET_MODE (x);
2315      if (GET_MODE (x) != VOIDmode)
2316	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2317	  {
2318	    unsigned HOST_WIDE_INT tem = XWINT (x, i);
2319	    hash += tem;
2320	  }
2321      else
2322	hash += ((unsigned) CONST_DOUBLE_LOW (x)
2323		 + (unsigned) CONST_DOUBLE_HIGH (x));
2324      return hash;
2325
2326    case CONST_VECTOR:
2327      {
2328	int units;
2329	rtx elt;
2330
2331	units = CONST_VECTOR_NUNITS (x);
2332
2333	for (i = 0; i < units; ++i)
2334	  {
2335	    elt = CONST_VECTOR_ELT (x, i);
2336	    hash += canon_hash (elt, GET_MODE (elt));
2337	  }
2338
2339	return hash;
2340      }
2341
2342      /* Assume there is only one rtx object for any given label.  */
2343    case LABEL_REF:
2344      hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2345      return hash;
2346
2347    case SYMBOL_REF:
2348      hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2349      return hash;
2350
2351    case MEM:
2352      /* We don't record if marked volatile or if BLKmode since we don't
2353	 know the size of the move.  */
2354      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2355	{
2356	  do_not_record = 1;
2357	  return 0;
2358	}
2359      if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2360	{
2361	  hash_arg_in_memory = 1;
2362	}
2363      /* Now that we have already found this special case,
2364	 might as well speed it up as much as possible.  */
2365      hash += (unsigned) MEM;
2366      x = XEXP (x, 0);
2367      goto repeat;
2368
2369    case USE:
2370      /* A USE that mentions non-volatile memory needs special
2371	 handling since the MEM may be BLKmode which normally
2372	 prevents an entry from being made.  Pure calls are
2373	 marked by a USE which mentions BLKmode memory.  */
2374      if (GET_CODE (XEXP (x, 0)) == MEM
2375	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2376	{
2377	  hash += (unsigned)USE;
2378	  x = XEXP (x, 0);
2379
2380	  if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2381	    hash_arg_in_memory = 1;
2382
2383	  /* Now that we have already found this special case,
2384	     might as well speed it up as much as possible.  */
2385	  hash += (unsigned) MEM;
2386	  x = XEXP (x, 0);
2387	  goto repeat;
2388	}
2389      break;
2390
2391    case PRE_DEC:
2392    case PRE_INC:
2393    case POST_DEC:
2394    case POST_INC:
2395    case PRE_MODIFY:
2396    case POST_MODIFY:
2397    case PC:
2398    case CC0:
2399    case CALL:
2400    case UNSPEC_VOLATILE:
2401      do_not_record = 1;
2402      return 0;
2403
2404    case ASM_OPERANDS:
2405      if (MEM_VOLATILE_P (x))
2406	{
2407	  do_not_record = 1;
2408	  return 0;
2409	}
2410      else
2411	{
2412	  /* We don't want to take the filename and line into account.  */
2413	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2414	    + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2415	    + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2416	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2417
2418	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2419	    {
2420	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2421		{
2422		  hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2423				       GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2424			   + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2425						(x, i)));
2426		}
2427
2428	      hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2429	      x = ASM_OPERANDS_INPUT (x, 0);
2430	      mode = GET_MODE (x);
2431	      goto repeat;
2432	    }
2433
2434	  return hash;
2435	}
2436      break;
2437
2438    default:
2439      break;
2440    }
2441
2442  i = GET_RTX_LENGTH (code) - 1;
2443  hash += (unsigned) code + (unsigned) GET_MODE (x);
2444  fmt = GET_RTX_FORMAT (code);
2445  for (; i >= 0; i--)
2446    {
2447      if (fmt[i] == 'e')
2448	{
2449	  rtx tem = XEXP (x, i);
2450
2451	  /* If we are about to do the last recursive call
2452	     needed at this level, change it into iteration.
2453	     This function is called often enough to be worth it.  */
2454	  if (i == 0)
2455	    {
2456	      x = tem;
2457	      goto repeat;
2458	    }
2459	  hash += canon_hash (tem, 0);
2460	}
2461      else if (fmt[i] == 'E')
2462	for (j = 0; j < XVECLEN (x, i); j++)
2463	  hash += canon_hash (XVECEXP (x, i, j), 0);
2464      else if (fmt[i] == 's')
2465	hash += canon_hash_string (XSTR (x, i));
2466      else if (fmt[i] == 'i')
2467	{
2468	  unsigned tem = XINT (x, i);
2469	  hash += tem;
2470	}
2471      else if (fmt[i] == '0' || fmt[i] == 't')
2472	/* Unused.  */
2473	;
2474      else
2475	abort ();
2476    }
2477  return hash;
2478}
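
/* Illustrative example (not in the original source): if reg 60 and
   reg 61 share a quantity, (plus:SI (reg 60) (const_int 4)) and
   (plus:SI (reg 61) (const_int 4)) hash identically, since a REG
   contributes its quantity number rather than its register number.  */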
2479
2480/* Like canon_hash but with no side effects.  */
2481
2482static unsigned
2483safe_hash (x, mode)
2484     rtx x;
2485     enum machine_mode mode;
2486{
2487  int save_do_not_record = do_not_record;
2488  int save_hash_arg_in_memory = hash_arg_in_memory;
2489  unsigned hash = canon_hash (x, mode);
2490  hash_arg_in_memory = save_hash_arg_in_memory;
2491  do_not_record = save_do_not_record;
2492  return hash;
2493}
2494
2495/* Return 1 iff X and Y would canonicalize into the same thing,
2496   without actually constructing the canonicalization of either one.
2497   If VALIDATE is nonzero,
2498   we assume X is an expression being processed from the rtl
2499   and Y was found in the hash table.  We check register refs
2500   in Y for being marked as valid.
2501
2502   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2503   that is known to be in the register.  Ordinarily, we don't allow them
2504   to match, because letting them match would cause unpredictable results
2505   in all the places that search a hash table chain for an equivalent
2506   for a given value.  A possible equivalent that has different structure
2507   has its hash code computed from different data.  Whether the hash code
2508   is the same as that of the given value is pure luck.  */
2509
2510static int
2511exp_equiv_p (x, y, validate, equal_values)
2512     rtx x, y;
2513     int validate;
2514     int equal_values;
2515{
2516  int i, j;
2517  enum rtx_code code;
2518  const char *fmt;
2519
2520  /* Note: it is incorrect to assume an expression is equivalent to itself
2521     if VALIDATE is nonzero.  */
2522  if (x == y && !validate)
2523    return 1;
2524  if (x == 0 || y == 0)
2525    return x == y;
2526
2527  code = GET_CODE (x);
2528  if (code != GET_CODE (y))
2529    {
2530      if (!equal_values)
2531	return 0;
2532
2533      /* If X is a constant and Y is a register or vice versa, they may be
2534	 equivalent.  We only have to validate if Y is a register.  */
2535      if (CONSTANT_P (x) && GET_CODE (y) == REG
2536	  && REGNO_QTY_VALID_P (REGNO (y)))
2537	{
2538	  int y_q = REG_QTY (REGNO (y));
2539	  struct qty_table_elem *y_ent = &qty_table[y_q];
2540
2541	  if (GET_MODE (y) == y_ent->mode
2542	      && rtx_equal_p (x, y_ent->const_rtx)
2543	      && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2544	    return 1;
2545	}
2546
2547      if (CONSTANT_P (y) && code == REG
2548	  && REGNO_QTY_VALID_P (REGNO (x)))
2549	{
2550	  int x_q = REG_QTY (REGNO (x));
2551	  struct qty_table_elem *x_ent = &qty_table[x_q];
2552
2553	  if (GET_MODE (x) == x_ent->mode
2554	      && rtx_equal_p (y, x_ent->const_rtx))
2555	    return 1;
2556	}
2557
2558      return 0;
2559    }
2560
2561  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2562  if (GET_MODE (x) != GET_MODE (y))
2563    return 0;
2564
2565  switch (code)
2566    {
2567    case PC:
2568    case CC0:
2569    case CONST_INT:
2570      return x == y;
2571
2572    case LABEL_REF:
2573      return XEXP (x, 0) == XEXP (y, 0);
2574
2575    case SYMBOL_REF:
2576      return XSTR (x, 0) == XSTR (y, 0);
2577
2578    case REG:
2579      {
2580	unsigned int regno = REGNO (y);
2581	unsigned int endregno
2582	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2583		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2584	unsigned int i;
2585
2586	/* If the quantities are not the same, the expressions are not
2587	   equivalent.  If they are and we are not to validate, they
2588	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2589
2590	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2591	  return 0;
2592
2593	if (! validate)
2594	  return 1;
2595
2596	for (i = regno; i < endregno; i++)
2597	  if (REG_IN_TABLE (i) != REG_TICK (i))
2598	    return 0;
2599
2600	return 1;
2601      }
2602
2603    /*  For commutative operations, check both orders.  */
2604    case PLUS:
2605    case MULT:
2606    case AND:
2607    case IOR:
2608    case XOR:
2609    case NE:
2610    case EQ:
2611      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2612	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2613			       validate, equal_values))
2614	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2615			       validate, equal_values)
2616		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2617				  validate, equal_values)));
2618
2619    case ASM_OPERANDS:
2620      /* We don't use the generic code below because we want to
2621	 disregard filename and line numbers.  */
2622
2623      /* A volatile asm isn't equivalent to any other.  */
2624      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2625	return 0;
2626
2627      if (GET_MODE (x) != GET_MODE (y)
2628	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2629	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2630		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2631	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2632	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2633	return 0;
2634
2635      if (ASM_OPERANDS_INPUT_LENGTH (x))
2636	{
2637	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2638	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2639			       ASM_OPERANDS_INPUT (y, i),
2640			       validate, equal_values)
2641		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2642			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2643	      return 0;
2644	}
2645
2646      return 1;
2647
2648    default:
2649      break;
2650    }
2651
2652	  /* Compare the elements.  If any pair of corresponding elements
2653	     fails to match, return 0 for the whole thing.  */
2654
2655  fmt = GET_RTX_FORMAT (code);
2656  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2657    {
2658      switch (fmt[i])
2659	{
2660	case 'e':
2661	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2662	    return 0;
2663	  break;
2664
2665	case 'E':
2666	  if (XVECLEN (x, i) != XVECLEN (y, i))
2667	    return 0;
2668	  for (j = 0; j < XVECLEN (x, i); j++)
2669	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2670			       validate, equal_values))
2671	      return 0;
2672	  break;
2673
2674	case 's':
2675	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2676	    return 0;
2677	  break;
2678
2679	case 'i':
2680	  if (XINT (x, i) != XINT (y, i))
2681	    return 0;
2682	  break;
2683
2684	case 'w':
2685	  if (XWINT (x, i) != XWINT (y, i))
2686	    return 0;
2687	  break;
2688
2689	case '0':
2690	case 't':
2691	  break;
2692
2693	default:
2694	  abort ();
2695	}
2696    }
2697
2698  return 1;
2699}
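
/* Illustrative example (not in the original source): by the
   commutative case above, (plus:SI (reg 1) (reg 2)) is considered
   equivalent to (plus:SI (reg 2) (reg 1)); with EQUAL_VALUES nonzero,
   a (reg:SI 70) whose quantity records (const_int 5) also matches
   (const_int 5).  */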
2700
2701/* Return 1 if X has a value that can vary even between two
2702   executions of the program.  0 means X can be compared reliably
2703   against certain constants or near-constants.  */
2704
2705static int
2706cse_rtx_varies_p (x, from_alias)
2707     rtx x;
2708     int from_alias;
2709{
2710  /* We need not check for X and the equivalence class being of the same
2711     mode because if X is equivalent to a constant in some mode, it
2712     doesn't vary in any mode.  */
2713
2714  if (GET_CODE (x) == REG
2715      && REGNO_QTY_VALID_P (REGNO (x)))
2716    {
2717      int x_q = REG_QTY (REGNO (x));
2718      struct qty_table_elem *x_ent = &qty_table[x_q];
2719
2720      if (GET_MODE (x) == x_ent->mode
2721	  && x_ent->const_rtx != NULL_RTX)
2722	return 0;
2723    }
2724
2725  if (GET_CODE (x) == PLUS
2726      && GET_CODE (XEXP (x, 1)) == CONST_INT
2727      && GET_CODE (XEXP (x, 0)) == REG
2728      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2729    {
2730      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2731      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2732
2733      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2734	  && x0_ent->const_rtx != NULL_RTX)
2735	return 0;
2736    }
2737
2738  /* This can happen as the result of virtual register instantiation, if
2739     the initial constant is too large to be a valid address.  This gives
2740     us a three instruction sequence, load large offset into a register,
2741     load fp minus a constant into a register, then a MEM which is the
2742     sum of the two `constant' registers.  */
2743  if (GET_CODE (x) == PLUS
2744      && GET_CODE (XEXP (x, 0)) == REG
2745      && GET_CODE (XEXP (x, 1)) == REG
2746      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2747      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2748    {
2749      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2750      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2751      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2752      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2753
2754      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2755	  && x0_ent->const_rtx != NULL_RTX
2756	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2757	  && x1_ent->const_rtx != NULL_RTX)
2758	return 0;
2759    }
2760
2761  return rtx_varies_p (x, from_alias);
2762}
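
/* Illustrative example (not in the original source): after virtual
   register instantiation a stack address can look like
   (plus (reg A) (reg B)), where reg A holds a large constant offset
   and reg B holds fp minus a constant; both quantities record
   constant equivalences, so the last test above treats the sum as
   nonvarying.  */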
2763
2764/* Canonicalize an expression:
2765   replace each register reference inside it
2766   with the "oldest" equivalent register.
2767
2768   If INSN is non-zero and we are replacing a pseudo with a hard register
2769   or vice versa, validate_change is used to ensure that INSN remains valid
2770   after we make our substitution.  The calls are made with IN_GROUP non-zero
2771   so apply_change_group must be called upon the outermost return from this
2772   function (unless INSN is zero).  The result of apply_change_group can
2773   generally be discarded since the changes we are making are optional.  */
2774
2775static rtx
2776canon_reg (x, insn)
2777     rtx x;
2778     rtx insn;
2779{
2780  int i;
2781  enum rtx_code code;
2782  const char *fmt;
2783
2784  if (x == 0)
2785    return x;
2786
2787  code = GET_CODE (x);
2788  switch (code)
2789    {
2790    case PC:
2791    case CC0:
2792    case CONST:
2793    case CONST_INT:
2794    case CONST_DOUBLE:
2795    case CONST_VECTOR:
2796    case SYMBOL_REF:
2797    case LABEL_REF:
2798    case ADDR_VEC:
2799    case ADDR_DIFF_VEC:
2800      return x;
2801
2802    case REG:
2803      {
2804	int first;
2805	int q;
2806	struct qty_table_elem *ent;
2807
2808	/* Never replace a hard reg, because hard regs can appear
2809	   in more than one machine mode, and we must preserve the mode
2810	   of each occurrence.  Also, some hard regs appear in
2811	   MEMs that are shared and mustn't be altered.  Don't try to
2812	   replace any reg that maps to a reg of class NO_REGS.  */
2813	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2814	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2815	  return x;
2816
2817	q = REG_QTY (REGNO (x));
2818	ent = &qty_table[q];
2819	first = ent->first_reg;
2820	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2821		: REGNO_REG_CLASS (first) == NO_REGS ? x
2822		: gen_rtx_REG (ent->mode, first));
2823      }
2824
2825    default:
2826      break;
2827    }
2828
2829  fmt = GET_RTX_FORMAT (code);
2830  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2831    {
2832      int j;
2833
2834      if (fmt[i] == 'e')
2835	{
2836	  rtx new = canon_reg (XEXP (x, i), insn);
2837	  int insn_code;
2838
2839	  /* If replacing pseudo with hard reg or vice versa, ensure the
2840	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2841	  if (insn != 0 && new != 0
2842	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2843	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2844		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2845		  || (insn_code = recog_memoized (insn)) < 0
2846		  || insn_data[insn_code].n_dups > 0))
2847	    validate_change (insn, &XEXP (x, i), new, 1);
2848	  else
2849	    XEXP (x, i) = new;
2850	}
2851      else if (fmt[i] == 'E')
2852	for (j = 0; j < XVECLEN (x, i); j++)
2853	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2854    }
2855
2856  return x;
2857}
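
/* Illustrative example (not in the original source): if pseudos 60,
   72 and 75 share a quantity whose first_reg is 60, canon_reg
   rewrites uses of regs 72 and 75 to reg 60, the "oldest" member of
   the class.  */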
2858
2859/* LOC is a location within INSN that is an operand address (the contents of
2860   a MEM).  Find the best equivalent address to use that is valid for this
2861   insn.
2862
2863   On most CISC machines, complicated address modes are costly, and rtx_cost
2864   is a good approximation for that cost.  However, most RISC machines have
2865   only a few (usually only one) memory reference formats.  If an address is
2866   valid at all, it is often just as cheap as any other address.  Hence, for
2867   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2868   costs of various addresses.  For two addresses of equal cost, choose the one
2869   with the highest `rtx_cost' value as that has the potential of eliminating
2870   the most insns.  For equal costs, we choose the first in the equivalence
2871   class.  Note that we ignore the fact that pseudo registers are cheaper
2872   than hard registers here because we would also prefer the pseudo registers.
2873  */
2874
2875static void
2876find_best_addr (insn, loc, mode)
2877     rtx insn;
2878     rtx *loc;
2879     enum machine_mode mode;
2880{
2881  struct table_elt *elt;
2882  rtx addr = *loc;
2883#ifdef ADDRESS_COST
2884  struct table_elt *p;
2885  int found_better = 1;
2886#endif
2887  int save_do_not_record = do_not_record;
2888  int save_hash_arg_in_memory = hash_arg_in_memory;
2889  int addr_volatile;
2890  int regno;
2891  unsigned hash;
2892
2893  /* Do not try to replace constant addresses or addresses of local and
2894     argument slots.  These MEM expressions are made only once and inserted
2895     in many instructions, as well as being used to control symbol table
2896     output.  It is not safe to clobber them.
2897
2898     There are some uncommon cases where the address is already in a register
2899     for some reason, but we cannot take advantage of that because we have
2900     no easy way to unshare the MEM.  In addition, looking up all stack
2901     addresses is costly.  */
2902  if ((GET_CODE (addr) == PLUS
2903       && GET_CODE (XEXP (addr, 0)) == REG
2904       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2905       && (regno = REGNO (XEXP (addr, 0)),
2906	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2907	   || regno == ARG_POINTER_REGNUM))
2908      || (GET_CODE (addr) == REG
2909	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2910	      || regno == HARD_FRAME_POINTER_REGNUM
2911	      || regno == ARG_POINTER_REGNUM))
2912      || GET_CODE (addr) == ADDRESSOF
2913      || CONSTANT_ADDRESS_P (addr))
2914    return;
2915
2916  /* If this address is not simply a register, try to fold it.  This will
2917     sometimes simplify the expression.  Many simplifications
2918     will not be valid, but some, usually applying the associative rule, will
2919     be valid and produce better code.  */
2920  if (GET_CODE (addr) != REG)
2921    {
2922      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2923      int addr_folded_cost = address_cost (folded, mode);
2924      int addr_cost = address_cost (addr, mode);
2925
2926      if ((addr_folded_cost < addr_cost
2927	   || (addr_folded_cost == addr_cost
2928	       /* ??? The rtx_cost comparison is left over from an older
2929		  version of this code.  It is probably no longer helpful.  */
2930	       && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2931		   || approx_reg_cost (folded) < approx_reg_cost (addr))))
2932	  && validate_change (insn, loc, folded, 0))
2933	addr = folded;
2934    }
2935
2936  /* If this address is not in the hash table, we can't look for equivalences
2937     of the whole address.  Also, ignore if volatile.  */
2938
2939  do_not_record = 0;
2940  hash = HASH (addr, Pmode);
2941  addr_volatile = do_not_record;
2942  do_not_record = save_do_not_record;
2943  hash_arg_in_memory = save_hash_arg_in_memory;
2944
2945  if (addr_volatile)
2946    return;
2947
2948  elt = lookup (addr, hash, Pmode);
2949
2950#ifndef ADDRESS_COST
2951  if (elt)
2952    {
2953      int our_cost = elt->cost;
2954
2955      /* Find the lowest cost below ours that works.  */
2956      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2957	if (elt->cost < our_cost
2958	    && (GET_CODE (elt->exp) == REG
2959		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
2960	    && validate_change (insn, loc,
2961				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2962	  return;
2963    }
2964#else
2965
2966  if (elt)
2967    {
2968      /* We need to find the best (under the criteria documented above) entry
2969	 in the class that is valid.  We use the `flag' field to indicate
2970	 choices that were invalid and iterate until we can't find a better
2971	 one that hasn't already been tried.  */
2972
2973      for (p = elt->first_same_value; p; p = p->next_same_value)
2974	p->flag = 0;
2975
2976      while (found_better)
2977	{
2978	  int best_addr_cost = address_cost (*loc, mode);
2979	  int best_rtx_cost = (elt->cost + 1) >> 1;
2980	  int exp_cost;
2981	  struct table_elt *best_elt = elt;
2982
2983	  found_better = 0;
2984	  for (p = elt->first_same_value; p; p = p->next_same_value)
2985	    if (! p->flag)
2986	      {
2987		if ((GET_CODE (p->exp) == REG
2988		     || exp_equiv_p (p->exp, p->exp, 1, 0))
2989		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2990			|| (exp_cost == best_addr_cost
2991			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
2992		  {
2993		    found_better = 1;
2994		    best_addr_cost = exp_cost;
2995		    best_rtx_cost = (p->cost + 1) >> 1;
2996		    best_elt = p;
2997		  }
2998	      }
2999
3000	  if (found_better)
3001	    {
3002	      if (validate_change (insn, loc,
3003				   canon_reg (copy_rtx (best_elt->exp),
3004					      NULL_RTX), 0))
3005		return;
3006	      else
3007		best_elt->flag = 1;
3008	    }
3009	}
3010    }
3011
3012  /* If the address is a binary operation with the first operand a register
3013     and the second a constant, do the same as above, but looking for
3014     equivalences of the register.  Then try to simplify before checking for
3015     the best address to use.  This catches a few cases:  First is when we
3016     have REG+const and the register is another REG+const.  We can often merge
3017     the constants and eliminate one insn and one register.  It may also be
3018     that a machine has a cheap REG+REG+const.  Finally, this improves the
3019     code on the Alpha for unaligned byte stores.  */
3020
3021  if (flag_expensive_optimizations
3022      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3023	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3024      && GET_CODE (XEXP (*loc, 0)) == REG
3025      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3026    {
3027      rtx c = XEXP (*loc, 1);
3028
3029      do_not_record = 0;
3030      hash = HASH (XEXP (*loc, 0), Pmode);
3031      do_not_record = save_do_not_record;
3032      hash_arg_in_memory = save_hash_arg_in_memory;
3033
3034      elt = lookup (XEXP (*loc, 0), hash, Pmode);
3035      if (elt == 0)
3036	return;
3037
3038      /* We need to find the best (under the criteria documented above) entry
3039	 in the class that is valid.  We use the `flag' field to indicate
3040	 choices that were invalid and iterate until we can't find a better
3041	 one that hasn't already been tried.  */
3042
3043      for (p = elt->first_same_value; p; p = p->next_same_value)
3044	p->flag = 0;
3045
3046      while (found_better)
3047	{
3048	  int best_addr_cost = address_cost (*loc, mode);
3049	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
3050	  struct table_elt *best_elt = elt;
3051	  rtx best_rtx = *loc;
3052	  int count;
3053
3054	  /* This is at worst an O(n^2) algorithm, so limit our search
3055	     to the first 32 elements on the list.  This avoids trouble
3056	     compiling code with very long basic blocks that can easily
3057	     call simplify_gen_binary so many times that we run out of
3058	     memory.  */
3059
3060	  found_better = 0;
3061	  for (p = elt->first_same_value, count = 0;
3062	       p && count < 32;
3063	       p = p->next_same_value, count++)
3064	    if (! p->flag
3065		&& (GET_CODE (p->exp) == REG
3066		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3067	      {
3068		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3069					       p->exp, c);
3070		int new_cost;
3071		new_cost = address_cost (new, mode);
3072
3073		if (new_cost < best_addr_cost
3074		    || (new_cost == best_addr_cost
3075			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3076		  {
3077		    found_better = 1;
3078		    best_addr_cost = new_cost;
3079		    best_rtx_cost = (COST (new) + 1) >> 1;
3080		    best_elt = p;
3081		    best_rtx = new;
3082		  }
3083	      }
3084
3085	  if (found_better)
3086	    {
3087	      if (validate_change (insn, loc,
3088				   canon_reg (copy_rtx (best_rtx),
3089					      NULL_RTX), 0))
3090		return;
3091	      else
3092		best_elt->flag = 1;
3093	    }
3094	}
3095    }
3096#endif
3097}
3098
3099/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
3100   operation (EQ, NE, GT, etc.), follow it back through the hash table to
3101   find what values are actually being compared.
3102
3103   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3104   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3105   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3106   compared to produce cc0.
3107
3108   The return value is the comparison operator: either CODE itself or the
3109   code corresponding to the inverse of the comparison.  */
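
/* Illustrative example (added; the register numbers are hypothetical): on a
   CC0 machine, fold_rtx replaces a (cc0) operand by the COMPARE that set it,
   so we may be called with CODE == NE, *PARG1 == (compare (reg:SI 100)
   (reg:SI 101)) and *PARG2 == (const_int 0).  The loop below unwraps the
   COMPARE and we return NE with *PARG1 == (reg:SI 100) and
   *PARG2 == (reg:SI 101), the values actually being compared.  */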
3110
3111static enum rtx_code
3112find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3113     enum rtx_code code;
3114     rtx *parg1, *parg2;
3115     enum machine_mode *pmode1, *pmode2;
3116{
3117  rtx arg1, arg2;
3118
3119  arg1 = *parg1, arg2 = *parg2;
3120
3121  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3122
3123  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3124    {
3125      /* Set nonzero when we find something of interest.  */
3126      rtx x = 0;
3127      int reverse_code = 0;
3128      struct table_elt *p = 0;
3129
3130      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3131	 On machines with CC0, this is the only case that can occur, since
3132	 fold_rtx will return the COMPARE or item being compared with zero
3133	 when given CC0.  */
3134
3135      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3136	x = arg1;
3137
3138      /* If ARG1 is a comparison operator and CODE is testing for
3139	 STORE_FLAG_VALUE, get the inner arguments.  */
3140
3141      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3142	{
3143	  if (code == NE
3144	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3145		  && code == LT && STORE_FLAG_VALUE == -1)
3146#ifdef FLOAT_STORE_FLAG_VALUE
3147	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3148		  && (REAL_VALUE_NEGATIVE
3149		      (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3150#endif
3151	      )
3152	    x = arg1;
3153	  else if (code == EQ
3154		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3155		       && code == GE && STORE_FLAG_VALUE == -1)
3156#ifdef FLOAT_STORE_FLAG_VALUE
3157		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3158		       && (REAL_VALUE_NEGATIVE
3159			   (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3160#endif
3161		   )
3162	    x = arg1, reverse_code = 1;
3163	}
3164
3165      /* ??? We could also check for
3166
3167	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3168
3169	 and related forms, but let's wait until we see them occurring.  */
3170
3171      if (x == 0)
3172	/* Look up ARG1 in the hash table and see if it has an equivalence
3173	   that lets us see what is being compared.  */
3174	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3175		    GET_MODE (arg1));
3176      if (p)
3177	{
3178	  p = p->first_same_value;
3179
3180	  /* If what we compare is already known to be constant, that is as
3181	     good as it gets.
3182	     We need to break the loop in this case; otherwise we could loop
3183	     forever when looking at a reg that is known to be a constant
3184	     that is the same as a comparison of a reg against zero, which
3185	     appears later in the insn stream and is in turn constant and
3186	     the same as the comparison of the first reg against
3187	     zero...  */
3188	  if (p->is_const)
3189	    break;
3190	}
3191
3192      for (; p; p = p->next_same_value)
3193	{
3194	  enum machine_mode inner_mode = GET_MODE (p->exp);
3195
3196	  /* If the entry isn't valid, skip it.  */
3197	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3198	    continue;
3199
3200	  if (GET_CODE (p->exp) == COMPARE
3201	      /* Another possibility is that this machine has a compare insn
3202		 that includes the comparison code.  In that case, ARG1 would
3203		 be equivalent to a comparison operation that would set ARG1 to
3204		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3205		 CODE is the actual comparison being done; if it is an EQ,
3206		 we must reverse CODE.  On machines with a negative value
3207		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3208	      || ((code == NE
3209		   || (code == LT
3210		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3211		       && (GET_MODE_BITSIZE (inner_mode)
3212			   <= HOST_BITS_PER_WIDE_INT)
3213		       && (STORE_FLAG_VALUE
3214			   & ((HOST_WIDE_INT) 1
3215			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3216#ifdef FLOAT_STORE_FLAG_VALUE
3217		   || (code == LT
3218		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3219		       && (REAL_VALUE_NEGATIVE
3220			   (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3221#endif
3222		   )
3223		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3224	    {
3225	      x = p->exp;
3226	      break;
3227	    }
3228	  else if ((code == EQ
3229		    || (code == GE
3230			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3231			&& (GET_MODE_BITSIZE (inner_mode)
3232			    <= HOST_BITS_PER_WIDE_INT)
3233			&& (STORE_FLAG_VALUE
3234			    & ((HOST_WIDE_INT) 1
3235			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3236#ifdef FLOAT_STORE_FLAG_VALUE
3237		    || (code == GE
3238			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3239		        && (REAL_VALUE_NEGATIVE
3240			    (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3241#endif
3242		    )
3243		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3244	    {
3245	      reverse_code = 1;
3246	      x = p->exp;
3247	      break;
3248	    }
3249
3250	  /* If this is fp + constant, the equivalent is a better operand since
3251	     it may let us predict the value of the comparison.  */
3252	  else if (NONZERO_BASE_PLUS_P (p->exp))
3253	    {
3254	      arg1 = p->exp;
3255	      continue;
3256	    }
3257	}
3258
3259      /* If we didn't find a useful equivalence for ARG1, we are done.
3260	 Otherwise, set up for the next iteration.  */
3261      if (x == 0)
3262	break;
3263
3264      /* If we need to reverse the comparison, make sure that reversal is
3265	 possible -- we can't necessarily infer the value of GE from LT
3266	 with floating-point operands.  */
3267      if (reverse_code)
3268	{
3269	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3270	  if (reversed == UNKNOWN)
3271	    break;
3272	  else code = reversed;
3273	}
3274      else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3275	code = GET_CODE (x);
3276      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3277    }
3278
3279  /* Return our results.  Return the modes from before fold_rtx
3280     because fold_rtx might produce const_int, and then it's too late.  */
3281  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3282  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3283
3284  return code;
3285}
3286
3287/* If X is a nontrivial arithmetic operation on an argument
3288   for which a constant value can be determined, return
3289   the result of operating on that value, as a constant.
3290   Otherwise, return X, possibly with one or more operands
3291   modified by recursive calls to this function.
3292
3293   If X is a register whose contents are known, we do NOT
3294   return those contents here.  equiv_constant is called to
3295   perform that task.
3296
3297   INSN is the insn that we may be modifying.  If it is 0, make a copy
3298   of X before modifying it.  */
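
/* Illustrative example (added; the register number is hypothetical): folding

       (plus:SI (reg:SI 100) (const_int 6))

   when reg 100 is known to hold (const_int 4) yields (const_int 10).
   Likewise (minus:SI (reg:SI 100) (reg:SI 100)) folds to (const_int 0) for
   an integer mode even when the register's value is unknown.  */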
3299
3300static rtx
3301fold_rtx (x, insn)
3302     rtx x;
3303     rtx insn;
3304{
3305  enum rtx_code code;
3306  enum machine_mode mode;
3307  const char *fmt;
3308  int i;
3309  rtx new = 0;
3310  int copied = 0;
3311  int must_swap = 0;
3312
3313  /* Folded equivalents of first two operands of X.  */
3314  rtx folded_arg0;
3315  rtx folded_arg1;
3316
3317  /* Constant equivalents of first three operands of X;
3318     0 when no such equivalent is known.  */
3319  rtx const_arg0;
3320  rtx const_arg1;
3321  rtx const_arg2;
3322
3323  /* The mode of the first operand of X.  We need this for sign and zero
3324     extends.  */
3325  enum machine_mode mode_arg0;
3326
3327  if (x == 0)
3328    return x;
3329
3330  mode = GET_MODE (x);
3331  code = GET_CODE (x);
3332  switch (code)
3333    {
3334    case CONST:
3335    case CONST_INT:
3336    case CONST_DOUBLE:
3337    case CONST_VECTOR:
3338    case SYMBOL_REF:
3339    case LABEL_REF:
3340    case REG:
3341      /* No use simplifying an EXPR_LIST,
3342	 since such lists are used only for lists of args
3343	 in a function call's REG_EQUAL note.  */
3344    case EXPR_LIST:
3345      /* Changing anything inside an ADDRESSOF is incorrect; we don't
3346	 want to (e.g.,) make (addressof (const_int 0)) just because
3347	 the location is known to be zero.  */
3348    case ADDRESSOF:
3349      return x;
3350
3351#ifdef HAVE_cc0
3352    case CC0:
3353      return prev_insn_cc0;
3354#endif
3355
3356    case PC:
3357      /* If the next insn is a CODE_LABEL followed by a jump table,
3358	 PC's value is a LABEL_REF pointing to that label.  That
3359	 lets us fold switch statements on the VAX.  */
3360      if (insn && GET_CODE (insn) == JUMP_INSN)
3361	{
3362	  rtx next = next_nonnote_insn (insn);
3363
3364	  if (next && GET_CODE (next) == CODE_LABEL
3365	      && NEXT_INSN (next) != 0
3366	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3367	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3368		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3369	    return gen_rtx_LABEL_REF (Pmode, next);
3370	}
3371      break;
3372
3373    case SUBREG:
3374      /* See if we previously assigned a constant value to this SUBREG.  */
3375      if ((new = lookup_as_function (x, CONST_INT)) != 0
3376	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3377	return new;
3378
3379      /* If this is a paradoxical SUBREG, we have no idea what value the
3380	 extra bits would have.  However, if the operand is equivalent
3381	 to a SUBREG whose operand is the same as our mode, and all the
3382	 modes are within a word, we can just use the inner operand
3383	 because these SUBREGs just say how to treat the register.
3384
3385	 Similarly if we find an integer constant.  */
3386
3387      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3388	{
3389	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3390	  struct table_elt *elt;
3391
3392	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3393	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3394	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3395				imode)) != 0)
3396	    for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3397	      {
3398		if (CONSTANT_P (elt->exp)
3399		    && GET_MODE (elt->exp) == VOIDmode)
3400		  return elt->exp;
3401
3402		if (GET_CODE (elt->exp) == SUBREG
3403		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
3404		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3405		  return copy_rtx (SUBREG_REG (elt->exp));
3406	      }
3407
3408	  return x;
3409	}
3410
3411      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
3412	 We might be able to if the SUBREG is extracting a single word in an
3413	 integral mode or extracting the low part.  */
3414
3415      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3416      const_arg0 = equiv_constant (folded_arg0);
3417      if (const_arg0)
3418	folded_arg0 = const_arg0;
3419
3420      if (folded_arg0 != SUBREG_REG (x))
3421	{
3422	  new = simplify_subreg (mode, folded_arg0,
3423				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3424	  if (new)
3425	    return new;
3426	}
3427
3428      /* If this is a narrowing SUBREG and our operand is a REG, see if
3429	 we can find an equivalence for REG that is an arithmetic operation
3430	 in a wider mode where both operands are paradoxical SUBREGs
3431	 from objects of our result mode.  In that case, we couldn't report
3432	 an equivalent value for that operation, since we don't know what the
3433	 extra bits will be.  But we can find an equivalence for this SUBREG
3434	 by folding that operation in the narrow mode.  This allows us to
3435	 fold arithmetic in narrow modes when the machine only supports
3436	 word-sized arithmetic.
3437
3438	 Also look for a case where we have a SUBREG whose operand is the
3439	 same as our result.  If both modes are smaller than a word, we
3440	 are simply interpreting a register in different modes and we
3441	 can use the inner value.  */
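
      /* Illustrative example (added; the registers are hypothetical): when
	 looking at (subreg:QI (reg:SI 100) 0), if the table shows

	     (reg:SI 100) == (plus:SI (subreg:SI (reg:QI 98) 0)
				      (subreg:SI (reg:QI 99) 0))

	 and regs 98 and 99 are known to hold (const_int 3) and
	 (const_int 4), the SUBREG folds to (const_int 7), redoing the
	 arithmetic in the narrow mode.  */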
3442
3443      if (GET_CODE (folded_arg0) == REG
3444	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3445	  && subreg_lowpart_p (x))
3446	{
3447	  struct table_elt *elt;
3448
3449	  /* We can use HASH here since we know that canon_hash won't be
3450	     called.  */
3451	  elt = lookup (folded_arg0,
3452			HASH (folded_arg0, GET_MODE (folded_arg0)),
3453			GET_MODE (folded_arg0));
3454
3455	  if (elt)
3456	    elt = elt->first_same_value;
3457
3458	  for (; elt; elt = elt->next_same_value)
3459	    {
3460	      enum rtx_code eltcode = GET_CODE (elt->exp);
3461
3462	      /* Just check for unary and binary operations.  */
3463	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3464		  && GET_CODE (elt->exp) != SIGN_EXTEND
3465		  && GET_CODE (elt->exp) != ZERO_EXTEND
3466		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3467		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3468		{
3469		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3470
3471		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3472		    op0 = fold_rtx (op0, NULL_RTX);
3473
3474		  op0 = equiv_constant (op0);
3475		  if (op0)
3476		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3477						    op0, mode);
3478		}
3479	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3480			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3481		       && eltcode != DIV && eltcode != MOD
3482		       && eltcode != UDIV && eltcode != UMOD
3483		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3484		       && eltcode != ROTATE && eltcode != ROTATERT
3485		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3486			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3487				== mode))
3488			   || CONSTANT_P (XEXP (elt->exp, 0)))
3489		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3490			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3491				== mode))
3492			   || CONSTANT_P (XEXP (elt->exp, 1))))
3493		{
3494		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3495		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3496
3497		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3498		    op0 = fold_rtx (op0, NULL_RTX);
3499
3500		  if (op0)
3501		    op0 = equiv_constant (op0);
3502
3503		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3504		    op1 = fold_rtx (op1, NULL_RTX);
3505
3506		  if (op1)
3507		    op1 = equiv_constant (op1);
3508
3509		  /* If we are looking for the low SImode part of
3510		     (ashift:DI c (const_int 32)), it doesn't work
3511		     to compute that in SImode, because a 32-bit shift
3512		     in SImode is unpredictable.  We know the value is 0.  */
3513		  if (op0 && op1
3514		      && GET_CODE (elt->exp) == ASHIFT
3515		      && GET_CODE (op1) == CONST_INT
3516		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3517		    {
3518		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3519
3520			/* If the count fits in the inner mode's width,
3521			   but exceeds the outer mode's width,
3522			   the value will get truncated to 0
3523			   by the subreg.  */
3524			new = const0_rtx;
3525		      else
3526			/* If the count exceeds even the inner mode's width,
3527			   don't fold this expression.  */
3528			new = 0;
3529		    }
3530		  else if (op0 && op1)
3531		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3532						     op0, op1);
3533		}
3534
3535	      else if (GET_CODE (elt->exp) == SUBREG
3536		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
3537		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3538			   <= UNITS_PER_WORD)
3539		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3540		new = copy_rtx (SUBREG_REG (elt->exp));
3541
3542	      if (new)
3543		return new;
3544	    }
3545	}
3546
3547      return x;
3548
3549    case NOT:
3550    case NEG:
3551      /* If we have (NOT Y), see if Y is known to be (NOT Z).
3552	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3553      new = lookup_as_function (XEXP (x, 0), code);
3554      if (new)
3555	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3556      break;
3557
3558    case MEM:
3559      /* If we are not actually processing an insn, don't try to find the
3560	 best address.  Not only don't we care, but we could modify the
3561	 MEM in an invalid way since we have no insn to validate against.  */
3562      if (insn != 0)
3563	find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3564
3565      {
3566	/* Even if we don't fold in the insn itself,
3567	   we can safely do so here, in hopes of getting a constant.  */
3568	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3569	rtx base = 0;
3570	HOST_WIDE_INT offset = 0;
3571
3572	if (GET_CODE (addr) == REG
3573	    && REGNO_QTY_VALID_P (REGNO (addr)))
3574	  {
3575	    int addr_q = REG_QTY (REGNO (addr));
3576	    struct qty_table_elem *addr_ent = &qty_table[addr_q];
3577
3578	    if (GET_MODE (addr) == addr_ent->mode
3579		&& addr_ent->const_rtx != NULL_RTX)
3580	      addr = addr_ent->const_rtx;
3581	  }
3582
3583	/* If the address is constant, split it into a base and an integer offset.  */
3584	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3585	  base = addr;
3586	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3587		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3588	  {
3589	    base = XEXP (XEXP (addr, 0), 0);
3590	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3591	  }
3592	else if (GET_CODE (addr) == LO_SUM
3593		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3594	  base = XEXP (addr, 1);
3595	else if (GET_CODE (addr) == ADDRESSOF)
3596	  return change_address (x, VOIDmode, addr);
3597
3598	/* If this is a constant pool reference, we can fold it into its
3599	   constant to allow better value tracking.  */
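	/* Illustrative example (added): if BASE is the SYMBOL_REF of a pool
	   entry built for a DFmode floating-point constant, an OFFSET of 0
	   with MODE == DFmode folds to that CONST_DOUBLE itself, and a
	   word-aligned word-sized piece of it can be extracted with
	   operand_subword below.  */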
3600	if (base && GET_CODE (base) == SYMBOL_REF
3601	    && CONSTANT_POOL_ADDRESS_P (base))
3602	  {
3603	    rtx constant = get_pool_constant (base);
3604	    enum machine_mode const_mode = get_pool_mode (base);
3605	    rtx new;
3606
3607	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3608	      constant_pool_entries_cost = COST (constant);
3609
3610	    /* If we are loading the full constant, we have an equivalence.  */
3611	    if (offset == 0 && mode == const_mode)
3612	      return constant;
3613
3614	    /* If this actually isn't a constant (weird!), we can't do
3615	       anything.  Otherwise, handle the two most common cases:
3616	       extracting a word from a multi-word constant, and extracting
3617	       the low-order bits.  Other cases don't seem common enough to
3618	       worry about.  */
3619	    if (! CONSTANT_P (constant))
3620	      return x;
3621
3622	    if (GET_MODE_CLASS (mode) == MODE_INT
3623		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
3624		&& offset % UNITS_PER_WORD == 0
3625		&& (new = operand_subword (constant,
3626					   offset / UNITS_PER_WORD,
3627					   0, const_mode)) != 0)
3628	      return new;
3629
3630	    if (((BYTES_BIG_ENDIAN
3631		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3632		 || (! BYTES_BIG_ENDIAN && offset == 0))
3633		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
3634	      return new;
3635	  }
3636
3637	/* If this is a reference to a label at a known position in a jump
3638	   table, we also know its value.  */
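	/* Illustrative example (added; the label is hypothetical): for a
	   4-byte-entry ADDR_VEC at label L1, a reference to
	   (plus (label_ref L1) (const_int 8)) selects entry 8/4 == 2, so
	   the load folds to the LABEL_REF stored in slot 2 of the vector.  */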
3639	if (base && GET_CODE (base) == LABEL_REF)
3640	  {
3641	    rtx label = XEXP (base, 0);
3642	    rtx table_insn = NEXT_INSN (label);
3643
3644	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3645		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3646	      {
3647		rtx table = PATTERN (table_insn);
3648
3649		if (offset >= 0
3650		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3651			< XVECLEN (table, 0)))
3652		  return XVECEXP (table, 0,
3653				  offset / GET_MODE_SIZE (GET_MODE (table)));
3654	      }
3655	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3656		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3657	      {
3658		rtx table = PATTERN (table_insn);
3659
3660		if (offset >= 0
3661		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3662			< XVECLEN (table, 1)))
3663		  {
3664		    offset /= GET_MODE_SIZE (GET_MODE (table));
3665		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3666					 XEXP (table, 0));
3667
3668		    if (GET_MODE (table) != Pmode)
3669		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3670
3671		    /* Indicate this is a constant.  This isn't a
3672		       valid form of CONST, but it will only be used
3673		       to fold the next insns and then discarded, so
3674		       it should be safe.
3675
3676		       Note this expression must be explicitly discarded,
3677		       by cse_insn, else it may end up in a REG_EQUAL note
3678		       and "escape" to cause problems elsewhere.  */
3679		    return gen_rtx_CONST (GET_MODE (new), new);
3680		  }
3681	      }
3682	  }
3683
3684	return x;
3685      }
3686
3687#ifdef NO_FUNCTION_CSE
3688    case CALL:
3689      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3690	return x;
3691      break;
3692#endif
3693
3694    case ASM_OPERANDS:
3695      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3696	validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3697			 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3698      break;
3699
3700    default:
3701      break;
3702    }
3703
3704  const_arg0 = 0;
3705  const_arg1 = 0;
3706  const_arg2 = 0;
3707  mode_arg0 = VOIDmode;
3708
3709  /* Try folding our operands.
3710     Then see which ones have constant values known.  */
3711
3712  fmt = GET_RTX_FORMAT (code);
3713  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3714    if (fmt[i] == 'e')
3715      {
3716	rtx arg = XEXP (x, i);
3717	rtx folded_arg = arg, const_arg = 0;
3718	enum machine_mode mode_arg = GET_MODE (arg);
3719	rtx cheap_arg, expensive_arg;
3720	rtx replacements[2];
3721	int j;
3722
3723	/* Most arguments are cheap, so handle them specially.  */
3724	switch (GET_CODE (arg))
3725	  {
3726	  case REG:
3727	    /* This is the same as calling equiv_constant; it is duplicated
3728	       here for speed.  */
3729	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3730	      {
3731		int arg_q = REG_QTY (REGNO (arg));
3732		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3733
3734		if (arg_ent->const_rtx != NULL_RTX
3735		    && GET_CODE (arg_ent->const_rtx) != REG
3736		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3737		  const_arg
3738		    = gen_lowpart_if_possible (GET_MODE (arg),
3739					       arg_ent->const_rtx);
3740	      }
3741	    break;
3742
3743	  case CONST:
3744	  case CONST_INT:
3745	  case SYMBOL_REF:
3746	  case LABEL_REF:
3747	  case CONST_DOUBLE:
3748	  case CONST_VECTOR:
3749	    const_arg = arg;
3750	    break;
3751
3752#ifdef HAVE_cc0
3753	  case CC0:
3754	    folded_arg = prev_insn_cc0;
3755	    mode_arg = prev_insn_cc0_mode;
3756	    const_arg = equiv_constant (folded_arg);
3757	    break;
3758#endif
3759
3760	  default:
3761	    folded_arg = fold_rtx (arg, insn);
3762	    const_arg = equiv_constant (folded_arg);
3763	  }
3764
3765	/* For the first three operands, see if the operand
3766	   is constant or equivalent to a constant.  */
3767	switch (i)
3768	  {
3769	  case 0:
3770	    folded_arg0 = folded_arg;
3771	    const_arg0 = const_arg;
3772	    mode_arg0 = mode_arg;
3773	    break;
3774	  case 1:
3775	    folded_arg1 = folded_arg;
3776	    const_arg1 = const_arg;
3777	    break;
3778	  case 2:
3779	    const_arg2 = const_arg;
3780	    break;
3781	  }
3782
3783	/* Pick the least expensive of the folded argument and an
3784	   equivalent constant argument.  */
3785	if (const_arg == 0 || const_arg == folded_arg
3786	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3787	  cheap_arg = folded_arg, expensive_arg = const_arg;
3788	else
3789	  cheap_arg = const_arg, expensive_arg = folded_arg;
3790
3791	/* Try to replace the operand with the cheaper of the two
3792	   possibilities.  If it doesn't work and this is either of the first
3793	   two operands of a commutative operation, try swapping them.
3794	   If THAT fails, try the more expensive one, provided it is cheaper
3795	   than what is already there.  */
3796
3797	if (cheap_arg == XEXP (x, i))
3798	  continue;
3799
3800	if (insn == 0 && ! copied)
3801	  {
3802	    x = copy_rtx (x);
3803	    copied = 1;
3804	  }
3805
3806	/* Order the replacements from cheapest to most expensive.  */
3807	replacements[0] = cheap_arg;
3808	replacements[1] = expensive_arg;
3809
3810	for (j = 0; j < 2 && replacements[j];  j++)
3811	  {
3812	    int old_cost = COST_IN (XEXP (x, i), code);
3813	    int new_cost = COST_IN (replacements[j], code);
3814
3815	    /* Stop if what existed before was cheaper.  Prefer constants
3816	       in the case of a tie.  */
3817	    if (new_cost > old_cost
3818		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3819	      break;
3820
3821	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3822	      break;
3823
3824	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3825		|| code == LTGT || code == UNEQ || code == ORDERED
3826		|| code == UNORDERED)
3827	      {
3828		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3829		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3830
3831		if (apply_change_group ())
3832		  {
3833		    /* Swap them back to be invalid so that this loop can
3834		       continue and flag them to be swapped back later.  */
3835		    rtx tem;
3836
3837		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3838				       XEXP (x, 1) = tem;
3839		    must_swap = 1;
3840		    break;
3841		  }
3842	      }
3843	  }
3844      }
3845
3846    else
3847      {
3848	if (fmt[i] == 'E')
3849	  /* Don't try to fold inside of a vector of expressions.
3850	     Doing nothing is harmless.  */
3851	  {;}
3852      }
3853
3854  /* If a commutative operation, place a constant integer as the second
3855     operand unless the first operand is also a constant integer.  Otherwise,
3856     place any constant second unless the first operand is also a constant.  */
3857
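  /* Illustrative example (added; the register number is hypothetical):
     this canonicalizes, e.g., (plus:SI (const_int 4) (reg:SI 100)) into
     (plus:SI (reg:SI 100) (const_int 4)), so that equivalent expressions
     hash to the same class and match the operand order later passes
     expect.  */
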
3858  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3859      || code == LTGT || code == UNEQ || code == ORDERED
3860      || code == UNORDERED)
3861    {
3862      if (must_swap || (const_arg0
3863	  		&& (const_arg1 == 0
3864	      		    || (GET_CODE (const_arg0) == CONST_INT
3865			        && GET_CODE (const_arg1) != CONST_INT))))
3866	{
3867	  rtx tem = XEXP (x, 0);
3868
3869	  if (insn == 0 && ! copied)
3870	    {
3871	      x = copy_rtx (x);
3872	      copied = 1;
3873	    }
3874
3875	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3876	  validate_change (insn, &XEXP (x, 1), tem, 1);
3877	  if (apply_change_group ())
3878	    {
3879	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3880	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3881	    }
3882	}
3883    }
3884
3885  /* If X is an arithmetic operation, see if we can simplify it.  */
3886
3887  switch (GET_RTX_CLASS (code))
3888    {
3889    case '1':
3890      {
3891	int is_const = 0;
3892
3893	/* We can't simplify extension ops unless we know the
3894	   original mode.  */
3895	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3896	    && mode_arg0 == VOIDmode)
3897	  break;
3898
3899	/* If we had a CONST, strip it off and put it back later if we
3900	   fold.  */
3901	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3902	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3903
3904	new = simplify_unary_operation (code, mode,
3905					const_arg0 ? const_arg0 : folded_arg0,
3906					mode_arg0);
3907	if (new != 0 && is_const)
3908	  new = gen_rtx_CONST (mode, new);
3909      }
3910      break;
3911
3912    case '<':
3913      /* See what items are actually being compared and set FOLDED_ARG[01]
3914	 to those values and CODE to the actual comparison code.  If any are
3915	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3916	 do anything if both operands are already known to be constant.  */
3917
3918      if (const_arg0 == 0 || const_arg1 == 0)
3919	{
3920	  struct table_elt *p0, *p1;
3921	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3922	  enum machine_mode mode_arg1;
3923
3924#ifdef FLOAT_STORE_FLAG_VALUE
3925	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3926	    {
3927	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3928		      (FLOAT_STORE_FLAG_VALUE (mode), mode));
3929	      false_rtx = CONST0_RTX (mode);
3930	    }
3931#endif
3932
3933	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3934				       &mode_arg0, &mode_arg1);
3935	  const_arg0 = equiv_constant (folded_arg0);
3936	  const_arg1 = equiv_constant (folded_arg1);
3937
3938	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3939	     what kinds of things are being compared, so we can't do
3940	     anything with this comparison.  */
3941
3942	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3943	    break;
3944
3945	  /* If we do not now have two constants being compared, see
3946	     if we can nevertheless deduce some things about the
3947	     comparison.  */
3948	  if (const_arg0 == 0 || const_arg1 == 0)
3949	    {
3950	      /* Is FOLDED_ARG0 the frame pointer plus a constant?  Or a
3951		 non-explicit constant?  These aren't zero, but we
3952		 don't know their sign.  */
3953	      if (const_arg1 == const0_rtx
3954		  && (NONZERO_BASE_PLUS_P (folded_arg0)
3955#if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3956	  come out as 0.  */
3957		      || GET_CODE (folded_arg0) == SYMBOL_REF
3958#endif
3959		      || GET_CODE (folded_arg0) == LABEL_REF
3960		      || GET_CODE (folded_arg0) == CONST))
3961		{
3962		  if (code == EQ)
3963		    return false_rtx;
3964		  else if (code == NE)
3965		    return true_rtx;
3966		}
3967
3968	      /* See if the two operands are the same.  */
3969
3970	      if (folded_arg0 == folded_arg1
3971		  || (GET_CODE (folded_arg0) == REG
3972		      && GET_CODE (folded_arg1) == REG
3973		      && (REG_QTY (REGNO (folded_arg0))
3974			  == REG_QTY (REGNO (folded_arg1))))
3975		  || ((p0 = lookup (folded_arg0,
3976				    (safe_hash (folded_arg0, mode_arg0)
3977				     & HASH_MASK), mode_arg0))
3978		      && (p1 = lookup (folded_arg1,
3979				       (safe_hash (folded_arg1, mode_arg0)
3980					& HASH_MASK), mode_arg0))
3981		      && p0->first_same_value == p1->first_same_value))
3982		{
3983		   /* Sadly two equal NaNs are not equivalent.  */
3984		   if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3985		       || ! FLOAT_MODE_P (mode_arg0)
3986		       || flag_unsafe_math_optimizations)
3987		      return ((code == EQ || code == LE || code == GE
3988			       || code == LEU || code == GEU || code == UNEQ
3989			       || code == UNLE || code == UNGE || code == ORDERED)
3990			      ? true_rtx : false_rtx);
3991		   /* Handle the FP comparisons we can resolve.  */
3992		   if (code == UNEQ || code == UNLE || code == UNGE)
3993		     return true_rtx;
3994		   if (code == LTGT || code == LT || code == GT)
3995		     return false_rtx;
3996		}
3997
3998	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3999		 doing now is either the same as we did before or the reverse
4000		 (we only check the reverse if not floating-point).  */
4001	      else if (GET_CODE (folded_arg0) == REG)
4002		{
4003		  int qty = REG_QTY (REGNO (folded_arg0));
4004
4005		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4006		    {
4007		      struct qty_table_elem *ent = &qty_table[qty];
4008
4009		      if ((comparison_dominates_p (ent->comparison_code, code)
4010			   || (! FLOAT_MODE_P (mode_arg0)
4011			       && comparison_dominates_p (ent->comparison_code,
4012						          reverse_condition (code))))
4013			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
4014			      || (const_arg1
4015				  && rtx_equal_p (ent->comparison_const,
4016						  const_arg1))
4017			      || (GET_CODE (folded_arg1) == REG
4018				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4019			return (comparison_dominates_p (ent->comparison_code, code)
4020				? true_rtx : false_rtx);
4021		    }
4022		}
4023	    }
4024	}
4025
4026      /* If we are comparing against zero, see if the first operand is
4027	 equivalent to an IOR with a constant.  If so, we may be able to
4028	 determine the result of this comparison.  */
4029
4030      if (const_arg1 == const0_rtx)
4031	{
4032	  rtx y = lookup_as_function (folded_arg0, IOR);
4033	  rtx inner_const;
4034
4035	  if (y != 0
4036	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4037	      && GET_CODE (inner_const) == CONST_INT
4038	      && INTVAL (inner_const) != 0)
4039	    {
4040	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4041	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4042			      && (INTVAL (inner_const)
4043				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4044	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4045
4046#ifdef FLOAT_STORE_FLAG_VALUE
4047	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4048		{
4049		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4050			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4051		  false_rtx = CONST0_RTX (mode);
4052		}
4053#endif
4054
4055	      switch (code)
4056		{
4057		case EQ:
4058		  return false_rtx;
4059		case NE:
4060		  return true_rtx;
4061		case LT:  case LE:
4062		  if (has_sign)
4063		    return true_rtx;
4064		  break;
4065		case GT:  case GE:
4066		  if (has_sign)
4067		    return false_rtx;
4068		  break;
4069		default:
4070		  break;
4071		}
4072	    }
4073	}
4074
4075      new = simplify_relational_operation (code,
4076					   (mode_arg0 != VOIDmode
4077					    ? mode_arg0
4078					    : (GET_MODE (const_arg0
4079							 ? const_arg0
4080							 : folded_arg0)
4081					       != VOIDmode)
4082					    ? GET_MODE (const_arg0
4083							? const_arg0
4084							: folded_arg0)
4085					    : GET_MODE (const_arg1
4086							? const_arg1
4087							: folded_arg1)),
4088					   const_arg0 ? const_arg0 : folded_arg0,
4089					   const_arg1 ? const_arg1 : folded_arg1);
4090#ifdef FLOAT_STORE_FLAG_VALUE
4091      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4092	{
4093	  if (new == const0_rtx)
4094	    new = CONST0_RTX (mode);
4095	  else
4096	    new = (CONST_DOUBLE_FROM_REAL_VALUE
4097		   (FLOAT_STORE_FLAG_VALUE (mode), mode));
4098	}
4099#endif
4100      break;
4101
4102    case '2':
4103    case 'c':
4104      switch (code)
4105	{
4106	case PLUS:
4107	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4108	     with that LABEL_REF as its second operand.  If so, the result is
4109	     the first operand of that MINUS.  This handles switches with an
4110	     ADDR_DIFF_VEC table.  */
4111	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4112	    {
4113	      rtx y
4114		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4115		: lookup_as_function (folded_arg0, MINUS);
4116
4117	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4118		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4119		return XEXP (y, 0);
4120
4121	      /* Now try for a CONST of a MINUS like the above.  */
4122	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4123			: lookup_as_function (folded_arg0, CONST))) != 0
4124		  && GET_CODE (XEXP (y, 0)) == MINUS
4125		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4126		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4127		return XEXP (XEXP (y, 0), 0);
4128	    }
4129
4130	  /* Likewise if the operands are in the other order.  */
4131	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4132	    {
4133	      rtx y
4134		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4135		: lookup_as_function (folded_arg1, MINUS);
4136
4137	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4138		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4139		return XEXP (y, 0);
4140
4141	      /* Now try for a CONST of a MINUS like the above.  */
4142	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4143			: lookup_as_function (folded_arg1, CONST))) != 0
4144		  && GET_CODE (XEXP (y, 0)) == MINUS
4145		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4146		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4147		return XEXP (XEXP (y, 0), 0);
4148	    }
4149
4150	  /* If second operand is a register equivalent to a negative
4151	     CONST_INT, see if we can find a register equivalent to the
4152	     positive constant.  Make a MINUS if so.  Don't do this for
4153	     a non-negative constant since we might then alternate between
4154	     choosing positive and negative constants.  Having the positive
4155	     constant previously used is the more common case.  Be sure
4156	     the resulting constant is non-negative; if const_arg1 were
4157	     the smallest negative number this would overflow: depending
4158	     on the mode, this would either just be the same value (and
4159	     hence not save anything) or be incorrect.  */
4160	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4161	      && INTVAL (const_arg1) < 0
4162	      /* This used to test
4163
4164	         -INTVAL (const_arg1) >= 0
4165
4166		 But the Sun V5.0 compilers miscompiled that test.  So
4167		 instead we test for the problematic value in a more direct
4168		 manner and hope the Sun compilers get it correct.  */
4169	      && INTVAL (const_arg1) !=
4170	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4171	      && GET_CODE (folded_arg1) == REG)
4172	    {
4173	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4174	      struct table_elt *p
4175		= lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4176			  mode);
4177
4178	      if (p)
4179		for (p = p->first_same_value; p; p = p->next_same_value)
4180		  if (GET_CODE (p->exp) == REG)
4181		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4182						canon_reg (p->exp, NULL_RTX));
4183	    }
4184	  goto from_plus;
4185
4186	case MINUS:
4187	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4188	     If so, produce (PLUS Z C2-C).  */
4189	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4190	    {
4191	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4192	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4193		return fold_rtx (plus_constant (copy_rtx (y),
4194						-INTVAL (const_arg1)),
4195				 NULL_RTX);
4196	    }
4197
4198	  /* Fall through.  */
4199
4200	from_plus:
4201	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4202	case IOR:     case AND:       case XOR:
4203	case MULT:    case DIV:       case UDIV:
4204	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4205	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4206	     is known to be of similar form, we may be able to replace the
4207	     operation with a combined operation.  This may eliminate the
4208	     intermediate operation if every use is simplified in this way.
4209	     Note that the similar optimization done by combine.c only works
4210	     if the intermediate operation's result has only one reference.  */
4211
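	  /* Illustrative example (added; the registers are hypothetical):
	     if FOLDED_ARG0 is reg 100 and the table shows

		 (reg:SI 100) == (plus:SI (reg:SI 99) (const_int 4)),

	     then (plus:SI (reg:SI 100) (const_int 8)) becomes
	     (plus:SI (reg:SI 99) (const_int 12)).  For the shift codes the
	     constants are instead combined with PLUS, since
	     (ashift (ashift X C1) C2) is (ashift X (plus C1 C2)) for
	     in-range shift counts; out-of-range counts are rejected
	     below.  */
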
4212	  if (GET_CODE (folded_arg0) == REG
4213	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4214	    {
4215	      int is_shift
4216		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4217	      rtx y = lookup_as_function (folded_arg0, code);
4218	      rtx inner_const;
4219	      enum rtx_code associate_code;
4220	      rtx new_const;
4221
4222	      if (y == 0
4223		  || 0 == (inner_const
4224			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4225		  || GET_CODE (inner_const) != CONST_INT
4226		  /* If we have compiled a statement like
4227		     "if (x == (x & mask1))", and now are looking at
4228		     "x & mask2", we will have a case where the first operand
4229		     of Y is the same as our first operand.  Unless we detect
4230		     this case, an infinite loop will result.  */
4231		  || XEXP (y, 0) == folded_arg0)
4232		break;
4233
4234	      /* Don't associate these operations if they are a PLUS with the
4235		 same constant and it is a power of two.  These might be doable
4236		 with a pre- or post-increment.  Similarly for two subtracts of
4237		 identical powers of two with post-decrement.  */
4238
4239	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4240		  && ((HAVE_PRE_INCREMENT
4241			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4242		      || (HAVE_POST_INCREMENT
4243			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4244		      || (HAVE_PRE_DECREMENT
4245			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4246		      || (HAVE_POST_DECREMENT
4247			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4248		break;
4249
4250	      /* Compute the code used to compose the constants.  For example,
4251		 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT.  */
4252
4253	      associate_code
4254		= (code == MULT || code == DIV || code == UDIV ? MULT
4255		   : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4256
4257	      new_const = simplify_binary_operation (associate_code, mode,
4258						     const_arg1, inner_const);
4259
4260	      if (new_const == 0)
4261		break;
4262
4263	      /* If we are associating shift operations, don't let this
4264		 produce a shift of the size of the object or larger.
4265		 This could occur when we follow a sign-extend by a right
4266		 shift on a machine that does a sign-extend as a pair
4267		 of shifts.  */
4268
4269	      if (is_shift && GET_CODE (new_const) == CONST_INT
4270		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4271		{
4272		  /* As an exception, we can turn an ASHIFTRT of this
4273		     form into a shift of the number of bits - 1.  */
4274		  if (code == ASHIFTRT)
4275		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4276		  else
4277		    break;
4278		}
4279
4280	      y = copy_rtx (XEXP (y, 0));
4281
4282	      /* If Y contains our first operand (the most common way this
4283		 can happen is if Y is a MEM), we would go into an infinite
4284		 loop if we tried to fold it.  So don't fold in that case.  */
4285
4286	      if (! reg_mentioned_p (folded_arg0, y))
4287		y = fold_rtx (y, insn);
4288
4289	      return simplify_gen_binary (code, mode, y, new_const);
4290	    }
4291	  break;
4292
4293	default:
4294	  break;
4295	}
4296
4297      new = simplify_binary_operation (code, mode,
4298				       const_arg0 ? const_arg0 : folded_arg0,
4299				       const_arg1 ? const_arg1 : folded_arg1);
4300      break;
4301
4302    case 'o':
4303      /* (lo_sum (high X) X) is simply X.  */
4304      if (code == LO_SUM && const_arg0 != 0
4305	  && GET_CODE (const_arg0) == HIGH
4306	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4307	return const_arg1;
4308      break;
4309
4310    case '3':
4311    case 'b':
4312      new = simplify_ternary_operation (code, mode, mode_arg0,
4313					const_arg0 ? const_arg0 : folded_arg0,
4314					const_arg1 ? const_arg1 : folded_arg1,
4315					const_arg2 ? const_arg2 : XEXP (x, 2));
4316      break;
4317
4318    case 'x':
4319      /* Always eliminate CONSTANT_P_RTX at this stage.  */
4320      if (code == CONSTANT_P_RTX)
4321	return (const_arg0 ? const1_rtx : const0_rtx);
4322      break;
4323    }
4324
4325  return new ? new : x;
4326}
4327
4328/* Return a constant value currently equivalent to X.
4329   Return 0 if we don't know one.  */
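
/* Illustrative example (added; the register number is hypothetical): if the
   quantity for reg 100 currently records the constant (const_int 5), then
   equiv_constant ((reg:SI 100)) returns (const_int 5).  For a MEM, the
   constant can come from folding a constant-pool reference or from a
   constant recorded earlier in the hash table.  */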
4330
4331static rtx
4332equiv_constant (x)
4333     rtx x;
4334{
4335  if (GET_CODE (x) == REG
4336      && REGNO_QTY_VALID_P (REGNO (x)))
4337    {
4338      int x_q = REG_QTY (REGNO (x));
4339      struct qty_table_elem *x_ent = &qty_table[x_q];
4340
4341      if (x_ent->const_rtx)
4342	x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4343    }
4344
4345  if (x == 0 || CONSTANT_P (x))
4346    return x;
4347
4348  /* If X is a MEM, try to fold it outside the context of any insn to see if
4349     it might be equivalent to a constant.  That handles the case where it
4350     is a constant-pool reference.  Then try to look it up in the hash table
4351     in case it is something whose value we have seen before.  */
4352
4353  if (GET_CODE (x) == MEM)
4354    {
4355      struct table_elt *elt;
4356
4357      x = fold_rtx (x, NULL_RTX);
4358      if (CONSTANT_P (x))
4359	return x;
4360
4361      elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4362      if (elt == 0)
4363	return 0;
4364
4365      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4366	if (elt->is_const && CONSTANT_P (elt->exp))
4367	  return elt->exp;
4368    }
4369
4370  return 0;
4371}
4372
4373/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4374   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4375   least-significant part of X.
4376   MODE specifies how big a part of X to return.
4377
4378   If the requested operation cannot be done, 0 is returned.
4379
4380   This is similar to gen_lowpart in emit-rtl.c.  */
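
/* Illustrative example (added; the register number is hypothetical): on a
   little-endian target, gen_lowpart_if_possible (QImode, (reg:SI 100))
   yields (subreg:QI (reg:SI 100) 0).  For a MEM the address is adjusted
   instead: on a 32-bit big-endian target the QImode low part of an SImode
   MEM lies at byte offset 3, keeping the address after the data
   unchanged.  */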
4381
4382rtx
4383gen_lowpart_if_possible (mode, x)
4384     enum machine_mode mode;
4385     rtx x;
4386{
4387  rtx result = gen_lowpart_common (mode, x);
4388
4389  if (result)
4390    return result;
4391  else if (GET_CODE (x) == MEM)
4392    {
4393      /* This is the only other case we handle.  */
4394      int offset = 0;
4395      rtx new;
4396
4397      if (WORDS_BIG_ENDIAN)
4398	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4399		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4400      if (BYTES_BIG_ENDIAN)
4401	/* Adjust the address so that the address-after-the-data is
4402	   unchanged.  */
4403	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4404		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4405
4406      new = adjust_address_nv (x, mode, offset);
4407      if (! memory_address_p (mode, XEXP (new, 0)))
4408	return 0;
4409
4410      return new;
4411    }
4412  else
4413    return 0;
4414}
4415
4416/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4417   branch.  It will be zero if not.
4418
4419   In certain cases, this can cause us to add an equivalence.  For example,
4420   if we are following the taken case of
4421   	if (i == 2)
4422   we can add the fact that `i' and `2' are now equivalent.
4423
4424   In any case, we can record that this comparison was passed.  If the same
4425   comparison is seen later, we will know its value.  */
4426
4427static void
4428record_jump_equiv (insn, taken)
4429     rtx insn;
4430     int taken;
4431{
4432  int cond_known_true;
4433  rtx op0, op1;
4434  rtx set;
4435  enum machine_mode mode, mode0, mode1;
4436  int reversed_nonequality = 0;
4437  enum rtx_code code;
4438
4439  /* Ensure this is the right kind of insn.  */
4440  if (! any_condjump_p (insn))
4441    return;
4442  set = pc_set (insn);
4443
4444  /* See if this jump condition is known true or false.  */
4445  if (taken)
4446    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4447  else
4448    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4449
4450  /* Get the type of comparison being done and the operands being compared.
4451     If we had to reverse a non-equality condition, record that fact so we
4452     know that it isn't valid for floating-point.  */
4453  code = GET_CODE (XEXP (SET_SRC (set), 0));
4454  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4455  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4456
4457  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4458  if (! cond_known_true)
4459    {
4460      code = reversed_comparison_code_parts (code, op0, op1, insn);
4461
4462      /* Don't record anything if we can't find the inverse.  */
4463      if (code == UNKNOWN)
4464	return;
4465    }
4466
4467  /* The mode is the mode of the non-constant.  */
4468  mode = mode0;
4469  if (mode1 != VOIDmode)
4470    mode = mode1;
4471
4472  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4473}
4474
4475/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4476   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4477   Make any useful entries we can with that information.  Called from
4478   above function and called recursively.  */
4479
4480static void
4481record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4482     enum rtx_code code;
4483     enum machine_mode mode;
4484     rtx op0, op1;
4485     int reversed_nonequality;
4486{
4487  unsigned op0_hash, op1_hash;
4488  int op0_in_memory, op1_in_memory;
4489  struct table_elt *op0_elt, *op1_elt;
4490
4491  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4492     we know that they are also equal in the smaller mode (this is also
4493     true for all smaller modes whether or not there is a SUBREG, but
4494     is not worth testing for with no SUBREG).  */
4495
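  /* Illustrative example (added; the registers are hypothetical): if the
     taken branch established (eq (subreg:DI (reg:SI 100) 0) (reg:DI 101))
     and the SUBREG is paradoxical, we can also record that (reg:SI 100)
     equals the SImode low part of reg 101; the extra bits play no role in
     an equality that held.  */
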
4496  /* Note that GET_MODE (op0) may not equal MODE.  */
4497  if (code == EQ && GET_CODE (op0) == SUBREG
4498      && (GET_MODE_SIZE (GET_MODE (op0))
4499	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4500    {
4501      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4502      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4503
4504      record_jump_cond (code, mode, SUBREG_REG (op0),
4505			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4506			reversed_nonequality);
4507    }
4508
4509  if (code == EQ && GET_CODE (op1) == SUBREG
4510      && (GET_MODE_SIZE (GET_MODE (op1))
4511	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4512    {
4513      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4514      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4515
4516      record_jump_cond (code, mode, SUBREG_REG (op1),
4517			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4518			reversed_nonequality);
4519    }
4520
4521  /* Similarly, if this is an NE comparison, and either is a SUBREG
4522     making a smaller mode, we know the whole thing is also NE.  */
4523
4524  /* Note that GET_MODE (op0) may not equal MODE;
4525     if we test MODE instead, we can get an infinite recursion
4526     alternating between two modes each wider than MODE.  */
4527
4528  if (code == NE && GET_CODE (op0) == SUBREG
4529      && subreg_lowpart_p (op0)
4530      && (GET_MODE_SIZE (GET_MODE (op0))
4531	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4532    {
4533      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4534      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4535
4536      record_jump_cond (code, mode, SUBREG_REG (op0),
4537			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4538			reversed_nonequality);
4539    }
4540
4541  if (code == NE && GET_CODE (op1) == SUBREG
4542      && subreg_lowpart_p (op1)
4543      && (GET_MODE_SIZE (GET_MODE (op1))
4544	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4545    {
4546      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4547      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4548
4549      record_jump_cond (code, mode, SUBREG_REG (op1),
4550			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4551			reversed_nonequality);
4552    }
4553
4554  /* Hash both operands.  */
4555
4556  do_not_record = 0;
4557  hash_arg_in_memory = 0;
4558  op0_hash = HASH (op0, mode);
4559  op0_in_memory = hash_arg_in_memory;
4560
4561  if (do_not_record)
4562    return;
4563
4564  do_not_record = 0;
4565  hash_arg_in_memory = 0;
4566  op1_hash = HASH (op1, mode);
4567  op1_in_memory = hash_arg_in_memory;
4568
4569  if (do_not_record)
4570    return;
4571
4572  /* Look up both operands.  */
4573  op0_elt = lookup (op0, op0_hash, mode);
4574  op1_elt = lookup (op1, op1_hash, mode);
4575
4576  /* If both operands are already equivalent or if they are not in the
4577     table but are identical, do nothing.  */
4578  if ((op0_elt != 0 && op1_elt != 0
4579       && op0_elt->first_same_value == op1_elt->first_same_value)
4580      || op0 == op1 || rtx_equal_p (op0, op1))
4581    return;
4582
4583  /* If we aren't setting two things equal, all we can do is save this
4584     comparison.  Similarly if this is floating-point.  In the latter
4585     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4586     If we record the equality, we might inadvertently delete code
4587     whose intent was to change -0 to +0.  */
4588
4589  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4590    {
4591      struct qty_table_elem *ent;
4592      int qty;
4593
4594      /* If we reversed a floating-point comparison, if OP0 is not a
4595	 register, or if OP1 is neither a register nor a constant, we can't
4596	 do anything.  */
4597
4598      if (GET_CODE (op1) != REG)
4599	op1 = equiv_constant (op1);
4600
4601      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4602	  || GET_CODE (op0) != REG || op1 == 0)
4603	return;
4604
4605      /* Put OP0 in the hash table if it isn't already.  This gives it a
4606	 new quantity number.  */
4607      if (op0_elt == 0)
4608	{
4609	  if (insert_regs (op0, NULL, 0))
4610	    {
4611	      rehash_using_reg (op0);
4612	      op0_hash = HASH (op0, mode);
4613
4614	      /* If OP0 is contained in OP1, this changes its hash code
4615		 as well.  Faster to rehash than to check, except
4616		 for the simple case of a constant.  */
4617	      if (! CONSTANT_P (op1))
4618		op1_hash = HASH (op1, mode);
4619	    }
4620
4621	  op0_elt = insert (op0, NULL, op0_hash, mode);
4622	  op0_elt->in_memory = op0_in_memory;
4623	}
4624
4625      qty = REG_QTY (REGNO (op0));
4626      ent = &qty_table[qty];
4627
4628      ent->comparison_code = code;
4629      if (GET_CODE (op1) == REG)
4630	{
4631	  /* Look it up again--in case op0 and op1 are the same.  */
4632	  op1_elt = lookup (op1, op1_hash, mode);
4633
4634	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4635	  if (op1_elt == 0)
4636	    {
4637	      if (insert_regs (op1, NULL, 0))
4638		{
4639		  rehash_using_reg (op1);
4640		  op1_hash = HASH (op1, mode);
4641		}
4642
4643	      op1_elt = insert (op1, NULL, op1_hash, mode);
4644	      op1_elt->in_memory = op1_in_memory;
4645	    }
4646
4647	  ent->comparison_const = NULL_RTX;
4648	  ent->comparison_qty = REG_QTY (REGNO (op1));
4649	}
4650      else
4651	{
4652	  ent->comparison_const = op1;
4653	  ent->comparison_qty = -1;
4654	}
4655
4656      return;
4657    }
4658
4659  /* If either side is still missing an equivalence, make it now,
4660     then merge the equivalences.  */
4661
4662  if (op0_elt == 0)
4663    {
4664      if (insert_regs (op0, NULL, 0))
4665	{
4666	  rehash_using_reg (op0);
4667	  op0_hash = HASH (op0, mode);
4668	}
4669
4670      op0_elt = insert (op0, NULL, op0_hash, mode);
4671      op0_elt->in_memory = op0_in_memory;
4672    }
4673
4674  if (op1_elt == 0)
4675    {
4676      if (insert_regs (op1, NULL, 0))
4677	{
4678	  rehash_using_reg (op1);
4679	  op1_hash = HASH (op1, mode);
4680	}
4681
4682      op1_elt = insert (op1, NULL, op1_hash, mode);
4683      op1_elt->in_memory = op1_in_memory;
4684    }
4685
4686  merge_equiv_classes (op0_elt, op1_elt);
4687  last_jump_equiv_class = op0_elt;
4688}
4689
4690/* CSE processing for one instruction.
4691   First simplify sources and addresses of all assignments
4692   in the instruction, using previously computed equivalent values.
4693   Then install the new sources and destinations in the table
4694   of available values.
4695
4696   If LIBCALL_INSN is nonzero, don't record any equivalence made in
4697   the insn.  It means that INSN is inside a libcall block.  In this
4698   case LIBCALL_INSN is the corresponding insn with the REG_LIBCALL note.  */
4699
4700/* Data on one SET contained in the instruction.  */
4701
4702struct set
4703{
4704  /* The SET rtx itself.  */
4705  rtx rtl;
4706  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4707  rtx src;
4708  /* The hash-table element for the SET_SRC of the SET.  */
4709  struct table_elt *src_elt;
4710  /* Hash value for the SET_SRC.  */
4711  unsigned src_hash;
4712  /* Hash value for the SET_DEST.  */
4713  unsigned dest_hash;
4714  /* The SET_DEST, with SUBREG, etc., stripped.  */
4715  rtx inner_dest;
4716  /* Nonzero if the SET_SRC is in memory.  */
4717  char src_in_memory;
4718  /* Nonzero if the SET_SRC contains something
4719     whose value cannot be predicted and understood.  */
4720  char src_volatile;
4721  /* Original machine mode, in case it becomes a CONST_INT.  */
4722  enum machine_mode mode;
4723  /* A constant equivalent for SET_SRC, if any.  */
4724  rtx src_const;
4725  /* Original SET_SRC value used for libcall notes.  */
4726  rtx orig_src;
4727  /* Hash value of constant equivalent for SET_SRC.  */
4728  unsigned src_const_hash;
4729  /* Table entry for constant equivalent for SET_SRC, if any.  */
4730  struct table_elt *src_const_elt;
4731};
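
/* As an illustrative sketch (register numbers are invented for
   exposition): for the insn pattern

	(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   rtl is the SET itself, src is the PLUS, inner_dest is (reg:SI 100),
   and mode is SImode.  */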
4732
4733static void
4734cse_insn (insn, libcall_insn)
4735     rtx insn;
4736     rtx libcall_insn;
4737{
4738  rtx x = PATTERN (insn);
4739  int i;
4740  rtx tem;
4741  int n_sets = 0;
4742
4743#ifdef HAVE_cc0
4744  /* Records what this insn does to set CC0.  */
4745  rtx this_insn_cc0 = 0;
4746  enum machine_mode this_insn_cc0_mode = VOIDmode;
4747#endif
4748
4749  rtx src_eqv = 0;
4750  struct table_elt *src_eqv_elt = 0;
4751  int src_eqv_volatile = 0;
4752  int src_eqv_in_memory = 0;
4753  unsigned src_eqv_hash = 0;
4754
4755  struct set *sets = (struct set *) 0;
4756
4757  this_insn = insn;
4758
4759  /* Find all the SETs and CLOBBERs in this instruction.
4760     Record all the SETs in the array `sets' and count them.
4761     Also determine whether there is a CLOBBER that invalidates
4762     all memory references, or all references at varying addresses.  */
4763
4764  if (GET_CODE (insn) == CALL_INSN)
4765    {
4766      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4767	{
4768	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4769	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4770	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4771	}
4772    }
4773
4774  if (GET_CODE (x) == SET)
4775    {
4776      sets = (struct set *) alloca (sizeof (struct set));
4777      sets[0].rtl = x;
4778
4779      /* Ignore SETs that are unconditional jumps.
4780	 They never need cse processing, so this does not hurt.
4781	 The reason is not efficiency but rather
4782	 so that we can test at the end for instructions
4783	 that have been simplified to unconditional jumps
4784	 and not be misled by unchanged instructions
4785	 that were unconditional jumps to begin with.  */
4786      if (SET_DEST (x) == pc_rtx
4787	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4788	;
4789
4790      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4791	 The hard function value register is used only once, to copy to
4792	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4793	 Ensure we invalidate the destination register.  On the 80386 no
4794	 other code would invalidate it since it is a fixed_reg.
4795	 We need not check the return of apply_change_group; see canon_reg.  */
4796
4797      else if (GET_CODE (SET_SRC (x)) == CALL)
4798	{
4799	  canon_reg (SET_SRC (x), insn);
4800	  apply_change_group ();
4801	  fold_rtx (SET_SRC (x), insn);
4802	  invalidate (SET_DEST (x), VOIDmode);
4803	}
4804      else
4805	n_sets = 1;
4806    }
4807  else if (GET_CODE (x) == PARALLEL)
4808    {
4809      int lim = XVECLEN (x, 0);
4810
4811      sets = (struct set *) alloca (lim * sizeof (struct set));
4812
4813      /* Find all regs explicitly clobbered in this insn,
4814	 and ensure they are not replaced with any other regs
4815	 elsewhere in this insn.
4816	 When a reg that is clobbered is also used for input,
4817	 we should presume that that is for a reason,
4818	 and we should not substitute some other register
4819	 which is not supposed to be clobbered.
4820	 Therefore, this loop cannot be merged into the one below
4821	 because a CALL may precede a CLOBBER and refer to the
4822	 value clobbered.  We must not let a canonicalization do
4823	 anything in that case.  */
4824      for (i = 0; i < lim; i++)
4825	{
4826	  rtx y = XVECEXP (x, 0, i);
4827	  if (GET_CODE (y) == CLOBBER)
4828	    {
4829	      rtx clobbered = XEXP (y, 0);
4830
4831	      if (GET_CODE (clobbered) == REG
4832		  || GET_CODE (clobbered) == SUBREG)
4833		invalidate (clobbered, VOIDmode);
4834	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4835		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4836		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4837	    }
4838	}
4839
4840      for (i = 0; i < lim; i++)
4841	{
4842	  rtx y = XVECEXP (x, 0, i);
4843	  if (GET_CODE (y) == SET)
4844	    {
4845	      /* As above, we ignore unconditional jumps and call-insns and
4846		 ignore the result of apply_change_group.  */
4847	      if (GET_CODE (SET_SRC (y)) == CALL)
4848		{
4849		  canon_reg (SET_SRC (y), insn);
4850		  apply_change_group ();
4851		  fold_rtx (SET_SRC (y), insn);
4852		  invalidate (SET_DEST (y), VOIDmode);
4853		}
4854	      else if (SET_DEST (y) == pc_rtx
4855		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4856		;
4857	      else
4858		sets[n_sets++].rtl = y;
4859	    }
4860	  else if (GET_CODE (y) == CLOBBER)
4861	    {
4862	      /* If we clobber memory, canon the address.
4863		 This does nothing when a register is clobbered
4864		 because we have already invalidated the reg.  */
4865	      if (GET_CODE (XEXP (y, 0)) == MEM)
4866		canon_reg (XEXP (y, 0), NULL_RTX);
4867	    }
4868	  else if (GET_CODE (y) == USE
4869		   && ! (GET_CODE (XEXP (y, 0)) == REG
4870			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4871	    canon_reg (y, NULL_RTX);
4872	  else if (GET_CODE (y) == CALL)
4873	    {
4874	      /* The result of apply_change_group can be ignored; see
4875		 canon_reg.  */
4876	      canon_reg (y, insn);
4877	      apply_change_group ();
4878	      fold_rtx (y, insn);
4879	    }
4880	}
4881    }
4882  else if (GET_CODE (x) == CLOBBER)
4883    {
4884      if (GET_CODE (XEXP (x, 0)) == MEM)
4885	canon_reg (XEXP (x, 0), NULL_RTX);
4886    }
4887
4888  /* Canonicalize a USE of a pseudo register or memory location.  */
4889  else if (GET_CODE (x) == USE
4890	   && ! (GET_CODE (XEXP (x, 0)) == REG
4891		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4892    canon_reg (XEXP (x, 0), NULL_RTX);
4893  else if (GET_CODE (x) == CALL)
4894    {
4895      /* The result of apply_change_group can be ignored; see canon_reg.  */
4896      canon_reg (x, insn);
4897      apply_change_group ();
4898      fold_rtx (x, insn);
4899    }
4900
4901  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4902     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
4903     is handled specially for this case, and if it isn't set, then there will
4904     be no equivalence for the destination.  */
4905  if (n_sets == 1 && REG_NOTES (insn) != 0
4906      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4907      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4908	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4909    src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
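
  /* For example (hypothetical register numbers): a multiplication expanded
     as a library call may end with

	(set (reg:SI 100) (reg:SI 0))

     carrying (REG_EQUAL (mult:SI (reg:SI 101) (reg:SI 102))); the MULT is
     the value that ends up in SRC_EQV.  */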
4910
4911  /* Canonicalize sources and addresses of destinations.
4912     We do this in a separate pass to avoid problems when a MATCH_DUP is
4913     present in the insn pattern.  In that case, we want to ensure that
4914     we don't break the duplicate nature of the pattern.  So we will replace
4915     both operands at the same time.  Otherwise, we would fail to find an
4916     equivalent substitution in the loop calling validate_change below.
4917
4918     We used to suppress canonicalization of DEST if it appears in SRC,
4919     but we don't do this any more.  */
4920
4921  for (i = 0; i < n_sets; i++)
4922    {
4923      rtx dest = SET_DEST (sets[i].rtl);
4924      rtx src = SET_SRC (sets[i].rtl);
4925      rtx new = canon_reg (src, insn);
4926      int insn_code;
4927
4928      sets[i].orig_src = src;
4929      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4930	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4931	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4932	  || (insn_code = recog_memoized (insn)) < 0
4933	  || insn_data[insn_code].n_dups > 0)
4934	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4935      else
4936	SET_SRC (sets[i].rtl) = new;
4937
4938      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4939	{
4940	  validate_change (insn, &XEXP (dest, 1),
4941			   canon_reg (XEXP (dest, 1), insn), 1);
4942	  validate_change (insn, &XEXP (dest, 2),
4943			   canon_reg (XEXP (dest, 2), insn), 1);
4944	}
4945
4946      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4947	     || GET_CODE (dest) == ZERO_EXTRACT
4948	     || GET_CODE (dest) == SIGN_EXTRACT)
4949	dest = XEXP (dest, 0);
4950
4951      if (GET_CODE (dest) == MEM)
4952	canon_reg (dest, insn);
4953    }
4954
4955  /* Now that we have done all the replacements, we can apply the change
4956     group and see if they all work.  Note that this will cause some
4957     canonicalizations that would have worked individually not to be applied
4958     because some other canonicalization didn't work, but this should not
4959     occur often.
4960
4961     The result of apply_change_group can be ignored; see canon_reg.  */
4962
4963  apply_change_group ();
4964
4965  /* Set sets[i].src_elt to the class each source belongs to.
4966     Detect assignments from or to volatile things
4967     and set sets[i] to zero so they will be ignored
4968     in the rest of this function.
4969
4970     Nothing in this loop changes the hash table or the register chains.  */
4971
4972  for (i = 0; i < n_sets; i++)
4973    {
4974      rtx src, dest;
4975      rtx src_folded;
4976      struct table_elt *elt = 0, *p;
4977      enum machine_mode mode;
4978      rtx src_eqv_here;
4979      rtx src_const = 0;
4980      rtx src_related = 0;
4981      struct table_elt *src_const_elt = 0;
4982      int src_cost = MAX_COST;
4983      int src_eqv_cost = MAX_COST;
4984      int src_folded_cost = MAX_COST;
4985      int src_related_cost = MAX_COST;
4986      int src_elt_cost = MAX_COST;
4987      int src_regcost = MAX_COST;
4988      int src_eqv_regcost = MAX_COST;
4989      int src_folded_regcost = MAX_COST;
4990      int src_related_regcost = MAX_COST;
4991      int src_elt_regcost = MAX_COST;
4992      /* Set non-zero if we need to call force_const_mem with the
4993	 contents of src_folded before using it.  */
4994      int src_folded_force_flag = 0;
4995
4996      dest = SET_DEST (sets[i].rtl);
4997      src = SET_SRC (sets[i].rtl);
4998
4999      /* If SRC is a constant that has no machine mode,
5000	 hash it with the destination's machine mode.
5001	 This way we can keep different modes separate.  */
5002
5003      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5004      sets[i].mode = mode;
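
      /* For instance, (const_int 4) carries no mode of its own, so hashing
	 it with the destination's mode keeps an SImode 4 and a DImode 4 in
	 separate equivalence classes.  */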
5005
5006      if (src_eqv)
5007	{
5008	  enum machine_mode eqvmode = mode;
5009	  if (GET_CODE (dest) == STRICT_LOW_PART)
5010	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5011	  do_not_record = 0;
5012	  hash_arg_in_memory = 0;
5013	  src_eqv = fold_rtx (src_eqv, insn);
5014	  src_eqv_hash = HASH (src_eqv, eqvmode);
5015
5016	  /* Find the equivalence class for the equivalent expression.  */
5017
5018	  if (!do_not_record)
5019	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5020
5021	  src_eqv_volatile = do_not_record;
5022	  src_eqv_in_memory = hash_arg_in_memory;
5023	}
5024
5025      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5026	 value of the INNER register, not the destination.  So it is not
5027	 a valid substitution for the source.  But save it for later.  */
5028      if (GET_CODE (dest) == STRICT_LOW_PART)
5029	src_eqv_here = 0;
5030      else
5031	src_eqv_here = src_eqv;
5032
5033      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
5034	 simplified result, which may not necessarily be valid.  */
5035      src_folded = fold_rtx (src, insn);
5036
5037#if 0
5038      /* ??? This caused bad code to be generated for the m68k port with -O2.
5039	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5040	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5041	 At the end we will add src and src_const to the same equivalence
5042	 class.  We now have 3 and -1 on the same equivalence class.  This
5043	 causes later instructions to be mis-optimized.  */
5044      /* If storing a constant in a bitfield, pre-truncate the constant
5045	 so we will be able to record it later.  */
5046      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5047	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5048	{
5049	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5050
5051	  if (GET_CODE (src) == CONST_INT
5052	      && GET_CODE (width) == CONST_INT
5053	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5054	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5055	    src_folded
5056	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5057					  << INTVAL (width)) - 1));
5058	}
5059#endif
5060
5061      /* Compute SRC's hash code, and also notice if it
5062	 should not be recorded at all.  In that case,
5063	 prevent any further processing of this assignment.  */
5064      do_not_record = 0;
5065      hash_arg_in_memory = 0;
5066
5067      sets[i].src = src;
5068      sets[i].src_hash = HASH (src, mode);
5069      sets[i].src_volatile = do_not_record;
5070      sets[i].src_in_memory = hash_arg_in_memory;
5071
5072      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5073	 a pseudo, do not record SRC.  Using SRC as a replacement for
5074	 anything else will be incorrect in that situation.  Note that
5075	 this usually occurs only for stack slots, in which case all the
5076	 RTL would be referring to SRC, so we don't lose any optimization
5077	 opportunities by not having SRC in the hash table.  */
5078
5079      if (GET_CODE (src) == MEM
5080	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5081	  && GET_CODE (dest) == REG
5082	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5083	sets[i].src_volatile = 1;
5084
5085#if 0
5086      /* It is no longer clear why we used to do this, but it doesn't
5087	 appear to still be needed.  So let's try without it since this
5088	 code hurts cse'ing widened ops.  */
5089      /* If source is a perverse subreg (such as QI treated as an SI),
5090	 treat it as volatile.  It may do the work of an SI in one context
5091	 where the extra bits are not being used, but cannot replace an SI
5092	 in general.  */
5093      if (GET_CODE (src) == SUBREG
5094	  && (GET_MODE_SIZE (GET_MODE (src))
5095	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5096	sets[i].src_volatile = 1;
5097#endif
5098
5099      /* Locate all possible equivalent forms for SRC.  Try to replace
5100         SRC in the insn with each cheaper equivalent.
5101
5102         We have the following types of equivalents: SRC itself, a folded
5103         version, a value given in a REG_EQUAL note, or a value related
5104	 to a constant.
5105
5106         Each of these equivalents may be part of an additional class
5107         of equivalents (if more than one is in the table, they must be in
5108         the same class; we check for this).
5109
5110	 If the source is volatile, we don't do any table lookups.
5111
5112         We note any constant equivalent for possible later use in a
5113         REG_NOTE.  */
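
      /* As a hypothetical sketch of the candidates: for
	 (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 0))), SRC is the
	 PLUS, SRC_FOLDED would be (reg:SI 101), and a REG_EQUAL note, if
	 present, supplies SRC_EQV.  */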
5114
5115      if (!sets[i].src_volatile)
5116	elt = lookup (src, sets[i].src_hash, mode);
5117
5118      sets[i].src_elt = elt;
5119
5120      if (elt && src_eqv_here && src_eqv_elt)
5121	{
5122	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5123	    {
5124	      /* The REG_EQUAL is indicating that two formerly distinct
5125		 classes are now equivalent.  So merge them.  */
5126	      merge_equiv_classes (elt, src_eqv_elt);
5127	      src_eqv_hash = HASH (src_eqv, elt->mode);
5128	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5129	    }
5130
5131	  src_eqv_here = 0;
5132	}
5133
5134      else if (src_eqv_elt)
5135	elt = src_eqv_elt;
5136
5137      /* Try to find a constant somewhere and record it in `src_const'.
5138	 Record its table element, if any, in `src_const_elt'.  Look in
5139	 any known equivalences first.  (If the constant is not in the
5140	 table, also set `sets[i].src_const_hash').  */
5141      if (elt)
5142	for (p = elt->first_same_value; p; p = p->next_same_value)
5143	  if (p->is_const)
5144	    {
5145	      src_const = p->exp;
5146	      src_const_elt = elt;
5147	      break;
5148	    }
5149
5150      if (src_const == 0
5151	  && (CONSTANT_P (src_folded)
5152	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5153		 "constant" here so we will record it. This allows us
5154		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5155	      || (GET_CODE (src_folded) == MINUS
5156		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5157		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5158	src_const = src_folded, src_const_elt = elt;
5159      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5160	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5161
5162      /* If we don't know if the constant is in the table, get its
5163	 hash code and look it up.  */
5164      if (src_const && src_const_elt == 0)
5165	{
5166	  sets[i].src_const_hash = HASH (src_const, mode);
5167	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5168	}
5169
5170      sets[i].src_const = src_const;
5171      sets[i].src_const_elt = src_const_elt;
5172
5173      /* If the constant and our source are both in the table, mark them as
5174	 equivalent.  Otherwise, if a constant is in the table but the source
5175	 isn't, set ELT to it.  */
5176      if (src_const_elt && elt
5177	  && src_const_elt->first_same_value != elt->first_same_value)
5178	merge_equiv_classes (elt, src_const_elt);
5179      else if (src_const_elt && elt == 0)
5180	elt = src_const_elt;
5181
5182      /* See if there is a register linearly related to a constant
5183         equivalent of SRC.  */
5184      if (src_const
5185	  && (GET_CODE (src_const) == CONST
5186	      || (src_const_elt && src_const_elt->related_value != 0)))
5187	{
5188	  src_related = use_related_value (src_const, src_const_elt);
5189	  if (src_related)
5190	    {
5191	      struct table_elt *src_related_elt
5192		= lookup (src_related, HASH (src_related, mode), mode);
5193	      if (src_related_elt && elt)
5194		{
5195		  if (elt->first_same_value
5196		      != src_related_elt->first_same_value)
5197		    /* This can occur when we previously saw a CONST
5198		       involving a SYMBOL_REF and then see the SYMBOL_REF
5199		       twice.  Merge the involved classes.  */
5200		    merge_equiv_classes (elt, src_related_elt);
5201
5202		  src_related = 0;
5203		  src_related_elt = 0;
5204		}
5205	      else if (src_related_elt && elt == 0)
5206		elt = src_related_elt;
5207	    }
5208	}
5209
5210      /* See if we have a CONST_INT that is already in a register in a
5211	 wider mode.  */
5212
5213      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5214	  && GET_MODE_CLASS (mode) == MODE_INT
5215	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5216	{
5217	  enum machine_mode wider_mode;
5218
5219	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5220	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5221	       && src_related == 0;
5222	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5223	    {
5224	      struct table_elt *const_elt
5225		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5226
5227	      if (const_elt == 0)
5228		continue;
5229
5230	      for (const_elt = const_elt->first_same_value;
5231		   const_elt; const_elt = const_elt->next_same_value)
5232		if (GET_CODE (const_elt->exp) == REG)
5233		  {
5234		    src_related = gen_lowpart_if_possible (mode,
5235							   const_elt->exp);
5236		    break;
5237		  }
5238	    }
5239	}
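
      /* For instance (hypothetical, little-endian): if (reg:SI 100) is
	 already known to hold (const_int 4) and this insn needs that
	 constant in QImode, the loop above can yield
	 (subreg:QI (reg:SI 100) 0) as a cheaper source than a fresh load.  */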
5240
5241      /* Another possibility is that we have an AND with a constant in
5242	 a mode narrower than a word.  If so, it might have been generated
5243	 as part of an "if" which would narrow the AND.  If we already
5244	 have done the AND in a wider mode, we can use a SUBREG of that
5245	 value.  */
5246
5247      if (flag_expensive_optimizations && ! src_related
5248	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5249	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5250	{
5251	  enum machine_mode tmode;
5252	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5253
5254	  for (tmode = GET_MODE_WIDER_MODE (mode);
5255	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5256	       tmode = GET_MODE_WIDER_MODE (tmode))
5257	    {
5258	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5259	      struct table_elt *larger_elt;
5260
5261	      if (inner)
5262		{
5263		  PUT_MODE (new_and, tmode);
5264		  XEXP (new_and, 0) = inner;
5265		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5266		  if (larger_elt == 0)
5267		    continue;
5268
5269		  for (larger_elt = larger_elt->first_same_value;
5270		       larger_elt; larger_elt = larger_elt->next_same_value)
5271		    if (GET_CODE (larger_elt->exp) == REG)
5272		      {
5273			src_related
5274			  = gen_lowpart_if_possible (mode, larger_elt->exp);
5275			break;
5276		      }
5277
5278		  if (src_related)
5279		    break;
5280		}
5281	    }
5282	}
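
      /* For instance (hypothetical): having already computed
	 (and:SI (reg:SI 100) (const_int 255)) into some register, a QImode
	 AND of the same operand with 255 can instead use the low part of
	 that wider result.  */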
5283
5284#ifdef LOAD_EXTEND_OP
5285      /* See if a MEM has already been loaded with a widening operation;
5286	 if it has, we can use a subreg of that.  Many CISC machines
5287	 also have such operations, but this is only likely to be
5288	 beneficial on these machines.  */
5289
5290      if (flag_expensive_optimizations && src_related == 0
5291	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5292	  && GET_MODE_CLASS (mode) == MODE_INT
5293	  && GET_CODE (src) == MEM && ! do_not_record
5294	  && LOAD_EXTEND_OP (mode) != NIL)
5295	{
5296	  enum machine_mode tmode;
5297
5298	  /* Set what we are trying to extend and the operation it might
5299	     have been extended with.  */
5300	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5301	  XEXP (memory_extend_rtx, 0) = src;
5302
5303	  for (tmode = GET_MODE_WIDER_MODE (mode);
5304	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5305	       tmode = GET_MODE_WIDER_MODE (tmode))
5306	    {
5307	      struct table_elt *larger_elt;
5308
5309	      PUT_MODE (memory_extend_rtx, tmode);
5310	      larger_elt = lookup (memory_extend_rtx,
5311				   HASH (memory_extend_rtx, tmode), tmode);
5312	      if (larger_elt == 0)
5313		continue;
5314
5315	      for (larger_elt = larger_elt->first_same_value;
5316		   larger_elt; larger_elt = larger_elt->next_same_value)
5317		if (GET_CODE (larger_elt->exp) == REG)
5318		  {
5319		    src_related = gen_lowpart_if_possible (mode,
5320							   larger_elt->exp);
5321		    break;
5322		  }
5323
5324	      if (src_related)
5325		break;
5326	    }
5327	}
5328#endif /* LOAD_EXTEND_OP */
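
      /* For instance (hypothetical): on a target whose QImode loads
	 zero-extend, a recorded (zero_extend:SI (mem:QI addr)) lets a later
	 (mem:QI addr) be replaced by the low part of the register holding
	 the extended value.  */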
5329
5330      if (src == src_folded)
5331	src_folded = 0;
5332
5333      /* At this point, ELT, if non-zero, points to a class of expressions
5334         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5335	 and SRC_RELATED, if non-zero, each contain additional equivalent
5336	 expressions.  Prune these latter expressions by deleting expressions
5337	 already in the equivalence class.
5338
5339	 Check for an equivalent identical to the destination.  If found,
5340	 this is the preferred equivalent since it will likely lead to
5341	 elimination of the insn.  Indicate this by placing it in
5342	 `src_related'.  */
5343
5344      if (elt)
5345	elt = elt->first_same_value;
5346      for (p = elt; p; p = p->next_same_value)
5347	{
5348	  enum rtx_code code = GET_CODE (p->exp);
5349
5350	  /* If the expression is not valid, ignore it.  Then we do not
5351	     have to check for validity below.  In most cases, we can use
5352	     `rtx_equal_p', since canonicalization has already been done.  */
5353	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5354	    continue;
5355
5356	  /* Also skip paradoxical subregs, unless that's what we're
5357	     looking for.  */
5358	  if (code == SUBREG
5359	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5360		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5361	      && ! (src != 0
5362		    && GET_CODE (src) == SUBREG
5363		    && GET_MODE (src) == GET_MODE (p->exp)
5364		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5365			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5366	    continue;
5367
5368	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5369	    src = 0;
5370	  else if (src_folded && GET_CODE (src_folded) == code
5371		   && rtx_equal_p (src_folded, p->exp))
5372	    src_folded = 0;
5373	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5374		   && rtx_equal_p (src_eqv_here, p->exp))
5375	    src_eqv_here = 0;
5376	  else if (src_related && GET_CODE (src_related) == code
5377		   && rtx_equal_p (src_related, p->exp))
5378	    src_related = 0;
5379
5380	  /* If this is the same as the destination of the insn, we want
5381	     to prefer it.  Copy it to src_related.  The code below will
5382	     then give it a negative cost.  */
5383	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5384	    src_related = dest;
5385	}
5386
5387      /* Find the cheapest valid equivalent, trying all the available
5388         possibilities.  Prefer items not in the hash table to ones
5389         that are when they are equal cost.  Note that we can never
5390         worsen an insn as the current contents will also succeed.
5391	 If we find an equivalent identical to the destination, use it as best,
5392	 since this insn will probably be eliminated in that case.  */
5393      if (src)
5394	{
5395	  if (rtx_equal_p (src, dest))
5396	    src_cost = src_regcost = -1;
5397	  else
5398	    {
5399	      src_cost = COST (src);
5400	      src_regcost = approx_reg_cost (src);
5401	    }
5402	}
5403
5404      if (src_eqv_here)
5405	{
5406	  if (rtx_equal_p (src_eqv_here, dest))
5407	    src_eqv_cost = src_eqv_regcost = -1;
5408	  else
5409	    {
5410	      src_eqv_cost = COST (src_eqv_here);
5411	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5412	    }
5413	}
5414
5415      if (src_folded)
5416	{
5417	  if (rtx_equal_p (src_folded, dest))
5418	    src_folded_cost = src_folded_regcost = -1;
5419	  else
5420	    {
5421	      src_folded_cost = COST (src_folded);
5422	      src_folded_regcost = approx_reg_cost (src_folded);
5423	    }
5424	}
5425
5426      if (src_related)
5427	{
5428	  if (rtx_equal_p (src_related, dest))
5429	    src_related_cost = src_related_regcost = -1;
5430	  else
5431	    {
5432	      src_related_cost = COST (src_related);
5433	      src_related_regcost = approx_reg_cost (src_related);
5434	    }
5435	}
5436
5437      /* If this was an indirect jump insn, a known label will really be
5438	 cheaper even though it looks more expensive.  */
5439      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5440	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5441
5442      /* Terminate loop when replacement made.  This must terminate since
5443         the current contents will be tested and will always be valid.  */
5444      while (1)
5445	{
5446	  rtx trial;
5447
5448	  /* Skip invalid entries.  */
5449	  while (elt && GET_CODE (elt->exp) != REG
5450		 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5451	    elt = elt->next_same_value;
5452
5453	  /* A paradoxical subreg would be bad here: it'll be the right
5454	     size, but later may be adjusted so that the upper bits aren't
5455	     what we want.  So reject it.  */
5456	  if (elt != 0
5457	      && GET_CODE (elt->exp) == SUBREG
5458	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5459		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5460	      /* It is okay, though, if the rtx we're trying to match
5461		 will ignore any of the bits we can't predict.  */
5462	      && ! (src != 0
5463		    && GET_CODE (src) == SUBREG
5464		    && GET_MODE (src) == GET_MODE (elt->exp)
5465		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5466			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5467	    {
5468	      elt = elt->next_same_value;
5469	      continue;
5470	    }
5471
5472          if (elt)
5473	    {
5474	      src_elt_cost = elt->cost;
5475	      src_elt_regcost = elt->regcost;
5476	    }
5477
5478          /* Find the cheapest one and skip it for the next time.  For items
5479	     of equal cost, use this order:
5480	     src_folded, src, src_eqv, src_related and hash table entry.  */
5481	  if (src_folded
5482	      && preferrable (src_folded_cost, src_folded_regcost,
5483			      src_cost, src_regcost) <= 0
5484	      && preferrable (src_folded_cost, src_folded_regcost,
5485			      src_eqv_cost, src_eqv_regcost) <= 0
5486	      && preferrable (src_folded_cost, src_folded_regcost,
5487			      src_related_cost, src_related_regcost) <= 0
5488	      && preferrable (src_folded_cost, src_folded_regcost,
5489			      src_elt_cost, src_elt_regcost) <= 0)
5490	    {
5491	      trial = src_folded, src_folded_cost = MAX_COST;
5492	      if (src_folded_force_flag)
5493		trial = force_const_mem (mode, trial);
5494	    }
5495	  else if (src
5496		   && preferrable (src_cost, src_regcost,
5497				   src_eqv_cost, src_eqv_regcost) <= 0
5498		   && preferrable (src_cost, src_regcost,
5499				   src_related_cost, src_related_regcost) <= 0
5500		   && preferrable (src_cost, src_regcost,
5501				   src_elt_cost, src_elt_regcost) <= 0)
5502	    trial = src, src_cost = MAX_COST;
5503	  else if (src_eqv_here
5504		   && preferrable (src_eqv_cost, src_eqv_regcost,
5505				   src_related_cost, src_related_regcost) <= 0
5506		   && preferrable (src_eqv_cost, src_eqv_regcost,
5507				   src_elt_cost, src_elt_regcost) <= 0)
5508	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5509	  else if (src_related
5510		   && preferrable (src_related_cost, src_related_regcost,
5511				   src_elt_cost, src_elt_regcost) <= 0)
5512  	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5513	  else
5514	    {
5515	      trial = copy_rtx (elt->exp);
5516	      elt = elt->next_same_value;
5517	      src_elt_cost = MAX_COST;
5518	    }
5519
5520	  /* We don't normally have an insn matching (set (pc) (pc)), so
5521	     check for this separately here.  We will delete such an
5522	     insn below.
5523
5524	     For other cases such as a table jump or conditional jump
5525	     where we know the ultimate target, go ahead and replace the
5526	     operand.  While that may not make a valid insn, we will
5527	     reemit the jump below (and also insert any necessary
5528	     barriers).  */
5529	  if (n_sets == 1 && dest == pc_rtx
5530	      && (trial == pc_rtx
5531		  || (GET_CODE (trial) == LABEL_REF
5532		      && ! condjump_p (insn))))
5533	    {
5534	      SET_SRC (sets[i].rtl) = trial;
5535	      cse_jumps_altered = 1;
5536	      break;
5537	    }
5538
5539	  /* Look for a substitution that makes a valid insn.  */
5540	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5541	    {
5542	      /* If we just made a substitution inside a libcall, then we
5543		 need to make the same substitution in any notes attached
5544		 to the RETVAL insn.  */
5545	      if (libcall_insn
5546		  && (GET_CODE (sets[i].orig_src) == REG
5547		      || GET_CODE (sets[i].orig_src) == SUBREG
5548		      || GET_CODE (sets[i].orig_src) == MEM))
5549		replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5550			     canon_reg (SET_SRC (sets[i].rtl), insn));
5551
5552	      /* The result of apply_change_group can be ignored; see
5553		 canon_reg.  */
5554
5555	      validate_change (insn, &SET_SRC (sets[i].rtl),
5556			       canon_reg (SET_SRC (sets[i].rtl), insn),
5557			       1);
5558	      apply_change_group ();
5559	      break;
5560	    }
5561
5562	  /* If we previously found constant pool entries for
5563	     constants and this is a constant, try making a
5564	     pool entry.  Put it in src_folded unless we have already done
5565	     so, since that is where it likely came from.  */
5566
5567	  else if (constant_pool_entries_cost
5568		   && CONSTANT_P (trial)
5569		   /* Reject cases that will abort in decode_rtx_const.
5570		      On the alpha when simplifying a switch, we get
5571		      (const (truncate (minus (label_ref) (label_ref)))).  */
5572		   && ! (GET_CODE (trial) == CONST
5573			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5574		   /* Likewise on IA-64, except without the truncate.  */
5575		   && ! (GET_CODE (trial) == CONST
5576			 && GET_CODE (XEXP (trial, 0)) == MINUS
5577			 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5578			 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5579		   && (src_folded == 0
5580		       || (GET_CODE (src_folded) != MEM
5581			   && ! src_folded_force_flag))
5582		   && GET_MODE_CLASS (mode) != MODE_CC
5583		   && mode != VOIDmode)
5584	    {
5585	      src_folded_force_flag = 1;
5586	      src_folded = trial;
5587	      src_folded_cost = constant_pool_entries_cost;
5588	    }
5589	}
5590
5591      src = SET_SRC (sets[i].rtl);
5592
5593      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5594	 However, there is an important exception:  If both are registers
5595	 that are not the head of their equivalence class, replace SET_SRC
5596	 with the head of the class.  If we do not do this, we will have
5597	 both registers live over a portion of the basic block.  This way,
5598	 their lifetimes will likely abut instead of overlapping.  */
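      /* Concretely (hypothetical register numbers): if reg 100 heads the
	 class and reg 101 is equivalent to it, an insn that has become
	 (set (reg 101) (reg 101)) is rewritten as (set (reg 101) (reg 100)),
	 so the lifetime of reg 101 can end here.  */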
5599      if (GET_CODE (dest) == REG
5600	  && REGNO_QTY_VALID_P (REGNO (dest)))
5601	{
5602	  int dest_q = REG_QTY (REGNO (dest));
5603	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5604
5605	  if (dest_ent->mode == GET_MODE (dest)
5606	      && dest_ent->first_reg != REGNO (dest)
5607	      && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5608	      /* Don't do this if the original insn had a hard reg as
5609		 SET_SRC or SET_DEST.  */
5610	      && (GET_CODE (sets[i].src) != REG
5611		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5612	      && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5613	    /* We can't call canon_reg here because it won't do anything if
5614	       SRC is a hard register.  */
5615	    {
5616	      int src_q = REG_QTY (REGNO (src));
5617	      struct qty_table_elem *src_ent = &qty_table[src_q];
5618	      int first = src_ent->first_reg;
5619	      rtx new_src
5620		= (first >= FIRST_PSEUDO_REGISTER
5621		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5622
5623	      /* We must use validate-change even for this, because this
5624		 might be a special no-op instruction, suitable only to
5625		 tag notes onto.  */
5626	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5627		{
5628		  src = new_src;
5629		  /* If we had a constant that is cheaper than what we are now
5630		     setting SRC to, use that constant.  We ignored it when we
5631		     thought we could make this into a no-op.  */
5632		  if (src_const && COST (src_const) < COST (src)
5633		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5634					  src_const, 0))
5635		    src = src_const;
5636		}
5637	    }
5638	}
5639
5640      /* If we made a change, recompute SRC values.  */
5641      if (src != sets[i].src)
5642	{
5643	  cse_altered = 1;
5644	  do_not_record = 0;
5645	  hash_arg_in_memory = 0;
5646	  sets[i].src = src;
5647	  sets[i].src_hash = HASH (src, mode);
5648	  sets[i].src_volatile = do_not_record;
5649	  sets[i].src_in_memory = hash_arg_in_memory;
5650	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5651	}
5652
5653      /* If this is a single SET, we are setting a register, and we have an
5654	 equivalent constant, we want to add a REG_NOTE.   We don't want
5655	 to write a REG_EQUAL note for a constant pseudo since verifying that
5656	 that pseudo hasn't been eliminated is a pain.  Such a note also
5657	 won't help anything.
5658
5659	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5660	 which can be created for a reference to a compile time computable
5661	 entry in a jump table.  */
5662
5663      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5664	  && GET_CODE (src_const) != REG
5665	  && ! (GET_CODE (src_const) == CONST
5666		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5667		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5668		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5669	{
5670	  /* Make sure that the rtx is not shared with any other insn.  */
5671	  src_const = copy_rtx (src_const);
5672
5673	  /* Record the actual constant value in a REG_EQUAL note, making
5674	     a new one if one does not already exist.  */
5675	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5676
5677          /* If storing a constant value in a register that
5678	     previously held the constant value 0,
5679	     record this fact with a REG_WAS_0 note on this insn.
5680
5681	     Note that the *register* is required to have previously held 0,
5682	     not just any register in the quantity, and we must point to the
5683	     insn that set that register to zero.
5684
5685	     Rather than track each register individually, we just see if
5686	     the last set for this quantity was for this register.  */
5687
5688	  if (REGNO_QTY_VALID_P (REGNO (dest)))
5689	    {
5690	      int dest_q = REG_QTY (REGNO (dest));
5691	      struct qty_table_elem *dest_ent = &qty_table[dest_q];
5692
5693	      if (dest_ent->const_rtx == const0_rtx)
5694		{
5695		  /* See if we previously had a REG_WAS_0 note.  */
5696		  rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5697		  rtx const_insn = dest_ent->const_insn;
5698
5699		  if ((tem = single_set (const_insn)) != 0
5700		      && rtx_equal_p (SET_DEST (tem), dest))
5701		    {
5702		      if (note)
5703			XEXP (note, 0) = const_insn;
5704		      else
5705			REG_NOTES (insn)
5706			  = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5707					       REG_NOTES (insn));
5708		    }
5709		}
5710	    }
5711	}
5712
5713      /* Now deal with the destination.  */
5714      do_not_record = 0;
5715
5716      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5717	 to the MEM or REG within it.  */
5718      while (GET_CODE (dest) == SIGN_EXTRACT
5719	     || GET_CODE (dest) == ZERO_EXTRACT
5720	     || GET_CODE (dest) == SUBREG
5721	     || GET_CODE (dest) == STRICT_LOW_PART)
5722	dest = XEXP (dest, 0);
5723
5724      sets[i].inner_dest = dest;
5725
5726      if (GET_CODE (dest) == MEM)
5727	{
5728#ifdef PUSH_ROUNDING
5729	  /* Stack pushes invalidate the stack pointer.  */
5730	  rtx addr = XEXP (dest, 0);
5731	  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5732	      && XEXP (addr, 0) == stack_pointer_rtx)
5733	    invalidate (stack_pointer_rtx, Pmode);
5734#endif
5735	  dest = fold_rtx (dest, insn);
5736	}
5737
5738      /* Compute the hash code of the destination now,
5739	 before the effects of this instruction are recorded,
5740	 since the register values used in the address computation
5741	 are those before this instruction.  */
5742      sets[i].dest_hash = HASH (dest, mode);
5743
5744      /* Don't enter a bit-field in the hash table
5745	 because the value in it after the store
5746	 may not equal what was stored, due to truncation.  */
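      /* E.g. storing (const_int 0x1ff) into an 8-bit bit-field leaves 0xff
	 behind, so recording 0x1ff as the destination's value would be
	 wrong.  */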
5747
5748      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5749	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5750	{
5751	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5752
5753	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5754	      && GET_CODE (width) == CONST_INT
5755	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5756	      && ! (INTVAL (src_const)
5757		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5758	    /* Exception: if the value is constant,
5759	       and it won't be truncated, record it.  */
5760	    ;
5761	  else
5762	    {
5763	      /* This is chosen so that the destination will be invalidated
5764		 but no new value will be recorded.
5765		 We must invalidate because sometimes constant
5766		 values can be recorded for bitfields.  */
5767	      sets[i].src_elt = 0;
5768	      sets[i].src_volatile = 1;
5769	      src_eqv = 0;
5770	      src_eqv_elt = 0;
5771	    }
5772	}
5773
5774      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5775	 the insn.  */
5776      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5777	{
5778	  /* One less use of the label this insn used to jump to.  */
5779	  delete_insn (insn);
5780	  cse_jumps_altered = 1;
5781	  /* No more processing for this set.  */
5782	  sets[i].rtl = 0;
5783	}
5784
5785      /* If this SET is now setting PC to a label, we know it used to
5786	 be a conditional or computed branch.  */
5787      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5788	{
5789	  /* Now emit a BARRIER after the unconditional jump.  */
5790	  if (NEXT_INSN (insn) == 0
5791	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5792	    emit_barrier_after (insn);
5793
5794	  /* We reemit the jump in as many cases as possible just in
5795	     case the form of an unconditional jump is significantly
5796	     different from that of a computed jump or conditional jump.
5797
5798	     If this insn has multiple sets, then reemitting the
5799	     jump is nontrivial.  So instead we just force rerecognition
5800	     and hope for the best.  */
5801	  if (n_sets == 1)
5802	    {
5803	      rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5804
5805	      JUMP_LABEL (new) = XEXP (src, 0);
5806	      LABEL_NUSES (XEXP (src, 0))++;
5807	      insn = new;
5808
5809	      /* Now emit a BARRIER after the unconditional jump.  */
5810	      if (NEXT_INSN (insn) == 0
5811		  || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5812		emit_barrier_after (insn);
5813	    }
5814	  else
5815	    INSN_CODE (insn) = -1;
5816
5817	  never_reached_warning (insn, NULL);
5818
5819	  /* Do not bother deleting any unreachable code,
5820	     let jump/flow do that.  */
5821
5822	  cse_jumps_altered = 1;
5823	  sets[i].rtl = 0;
5824	}
5825
5826      /* If destination is volatile, invalidate it and then do no further
5827	 processing for this assignment.  */
5828
5829      else if (do_not_record)
5830	{
5831	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5832	    invalidate (dest, VOIDmode);
5833	  else if (GET_CODE (dest) == MEM)
5834	    {
5835	      /* Outgoing arguments for a libcall don't
5836		 affect any recorded expressions.  */
5837	      if (! libcall_insn || insn == libcall_insn)
5838		invalidate (dest, VOIDmode);
5839	    }
5840	  else if (GET_CODE (dest) == STRICT_LOW_PART
5841		   || GET_CODE (dest) == ZERO_EXTRACT)
5842	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5843	  sets[i].rtl = 0;
5844	}
5845
5846      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5847	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5848
5849#ifdef HAVE_cc0
5850      /* If setting CC0, record what it was set to, or a constant, if it
5851	 is equivalent to a constant.  If it is being set to a floating-point
5852	 value, make a COMPARE with the appropriate constant of 0.  If we
5853	 don't do this, later code can interpret this as a test against
5854	 const0_rtx, which can cause problems if we try to put it into an
5855	 insn as a floating-point operand.  */
5856      if (dest == cc0_rtx)
5857	{
5858	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5859	  this_insn_cc0_mode = mode;
5860	  if (FLOAT_MODE_P (mode))
5861	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5862					     CONST0_RTX (mode));
5863	}
5864#endif
5865    }
5866
5867  /* Now enter all non-volatile source expressions in the hash table
5868     if they are not already present.
5869     Record their equivalence classes in src_elt.
5870     This way we can insert the corresponding destinations into
5871     the same classes even if the actual sources are no longer in them
5872     (having been invalidated).  */
5873
5874  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5875      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5876    {
5877      struct table_elt *elt;
5878      struct table_elt *classp = sets[0].src_elt;
5879      rtx dest = SET_DEST (sets[0].rtl);
5880      enum machine_mode eqvmode = GET_MODE (dest);
5881
5882      if (GET_CODE (dest) == STRICT_LOW_PART)
5883	{
5884	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5885	  classp = 0;
5886	}
5887      if (insert_regs (src_eqv, classp, 0))
5888	{
5889	  rehash_using_reg (src_eqv);
5890	  src_eqv_hash = HASH (src_eqv, eqvmode);
5891	}
5892      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5893      elt->in_memory = src_eqv_in_memory;
5894      src_eqv_elt = elt;
5895
5896      /* Check to see if src_eqv_elt is the same as a set source which
5897	 does not yet have an elt, and if so set the elt of the set source
5898	 to src_eqv_elt.  */
5899      for (i = 0; i < n_sets; i++)
5900	if (sets[i].rtl && sets[i].src_elt == 0
5901	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5902	  sets[i].src_elt = src_eqv_elt;
5903    }
5904
5905  for (i = 0; i < n_sets; i++)
5906    if (sets[i].rtl && ! sets[i].src_volatile
5907	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5908      {
5909	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5910	  {
5911	    /* REG_EQUAL in setting a STRICT_LOW_PART
5912	       gives an equivalent for the entire destination register,
5913	       not just for the subreg being stored in now.
5914	       This is a more interesting equivalence, so we arrange later
5915	       to treat the entire reg as the destination.  */
5916	    sets[i].src_elt = src_eqv_elt;
5917	    sets[i].src_hash = src_eqv_hash;
5918	  }
5919	else
5920	  {
5921	    /* Insert source and constant equivalent into hash table, if not
5922	       already present.  */
5923	    struct table_elt *classp = src_eqv_elt;
5924	    rtx src = sets[i].src;
5925	    rtx dest = SET_DEST (sets[i].rtl);
5926	    enum machine_mode mode
5927	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5928
5929	    if (sets[i].src_elt == 0)
5930	      {
5931		/* Don't put a hard register source into the table if this is
5932		   the last insn of a libcall.  In this case, we only need
5933		   to put src_eqv_elt in src_elt.  */
5934		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5935		  {
5936		    struct table_elt *elt;
5937
5938		    /* Note that these insert_regs calls cannot remove
5939		       any of the src_elt's, because they would have failed to
5940		       match if not still valid.  */
5941		    if (insert_regs (src, classp, 0))
5942		      {
5943			rehash_using_reg (src);
5944			sets[i].src_hash = HASH (src, mode);
5945		      }
5946		    elt = insert (src, classp, sets[i].src_hash, mode);
5947		    elt->in_memory = sets[i].src_in_memory;
5948		    sets[i].src_elt = classp = elt;
5949		  }
5950		else
5951		  sets[i].src_elt = classp;
5952	      }
5953	    if (sets[i].src_const && sets[i].src_const_elt == 0
5954		&& src != sets[i].src_const
5955		&& ! rtx_equal_p (sets[i].src_const, src))
5956	      sets[i].src_elt = insert (sets[i].src_const, classp,
5957					sets[i].src_const_hash, mode);
5958	  }
5959      }
5960    else if (sets[i].src_elt == 0)
5961      /* If we did not insert the source into the hash table (e.g., it was
5962	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5963	 so that the destination goes into that class.  */
5964      sets[i].src_elt = src_eqv_elt;
5965
5966  invalidate_from_clobbers (x);
5967
5968  /* Some registers are invalidated by subroutine calls.  Memory is
5969     invalidated by non-constant calls.  */
5970
5971  if (GET_CODE (insn) == CALL_INSN)
5972    {
5973      if (! CONST_OR_PURE_CALL_P (insn))
5974	invalidate_memory ();
5975      invalidate_for_call ();
5976    }
5977
5978  /* Now invalidate everything set by this instruction.
5979     If a SUBREG or other funny destination is being set,
5980     sets[i].rtl is still nonzero, so here we invalidate the reg
5981     a part of which is being set.  */
5982
5983  for (i = 0; i < n_sets; i++)
5984    if (sets[i].rtl)
5985      {
5986	/* We can't use the inner dest, because the mode associated with
5987	   a ZERO_EXTRACT is significant.  */
5988	rtx dest = SET_DEST (sets[i].rtl);
5989
5990	/* Needed for registers to remove the register from its
5991	   previous quantity's chain.
5992	   Needed for memory if this is a nonvarying address, unless
5993	   we have just done an invalidate_memory that covers even those.  */
5994	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5995	  invalidate (dest, VOIDmode);
5996	else if (GET_CODE (dest) == MEM)
5997	  {
5998	    /* Outgoing arguments for a libcall don't
5999	       affect any recorded expressions.  */
6000	    if (! libcall_insn || insn == libcall_insn)
6001	      invalidate (dest, VOIDmode);
6002	  }
6003	else if (GET_CODE (dest) == STRICT_LOW_PART
6004		 || GET_CODE (dest) == ZERO_EXTRACT)
6005	  invalidate (XEXP (dest, 0), GET_MODE (dest));
6006      }
6007
6008  /* A volatile ASM invalidates everything.  */
6009  if (GET_CODE (insn) == INSN
6010      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6011      && MEM_VOLATILE_P (PATTERN (insn)))
6012    flush_hash_table ();
6013
6014  /* Make sure registers mentioned in destinations
6015     are safe for use in an expression to be inserted.
6016     This removes from the hash table
6017     any invalid entry that refers to one of these registers.
6018
6019     We don't care about the return value from mention_regs because
6020     we are going to hash the SET_DEST values unconditionally.  */
6021
6022  for (i = 0; i < n_sets; i++)
6023    {
6024      if (sets[i].rtl)
6025	{
6026	  rtx x = SET_DEST (sets[i].rtl);
6027
6028	  if (GET_CODE (x) != REG)
6029	    mention_regs (x);
6030	  else
6031	    {
6032	      /* We used to rely on all references to a register becoming
6033		 inaccessible when a register changes to a new quantity,
6034		 since that changes the hash code.  However, that is not
6035		 safe, since after HASH_SIZE new quantities we get a
6036		 hash 'collision' of a register with its own invalid
6037		 entries.  And since SUBREGs have been changed not to
6038		 change their hash code with the hash code of the register,
6039		 it wouldn't work any longer at all.  So we have to check
6040		 for any invalid references lying around now.
6041		 This code is similar to the REG case in mention_regs,
6042		 but it knows that reg_tick has been incremented, and
6043		 it leaves reg_in_table as -1.  */
6044	      unsigned int regno = REGNO (x);
6045	      unsigned int endregno
6046		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6047			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6048	      unsigned int i;
6049
6050	      for (i = regno; i < endregno; i++)
6051		{
6052		  if (REG_IN_TABLE (i) >= 0)
6053		    {
6054		      remove_invalid_refs (i);
6055		      REG_IN_TABLE (i) = -1;
6056		    }
6057		}
6058	    }
6059	}
6060    }
6061
6062  /* We may have just removed some of the src_elt's from the hash table.
6063     So replace each one with the current head of the same class.  */
6064
6065  for (i = 0; i < n_sets; i++)
6066    if (sets[i].rtl)
6067      {
6068	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6069	  /* If elt was removed, find current head of same class,
6070	     or 0 if nothing remains of that class.  */
6071	  {
6072	    struct table_elt *elt = sets[i].src_elt;
6073
6074	    while (elt && elt->prev_same_value)
6075	      elt = elt->prev_same_value;
6076
6077	    while (elt && elt->first_same_value == 0)
6078	      elt = elt->next_same_value;
6079	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6080	  }
6081      }
6082
6083  /* Now insert the destinations into their equivalence classes.  */
6084
6085  for (i = 0; i < n_sets; i++)
6086    if (sets[i].rtl)
6087      {
6088	rtx dest = SET_DEST (sets[i].rtl);
6089	rtx inner_dest = sets[i].inner_dest;
6090	struct table_elt *elt;
6091
6092	/* Don't record value if we are not supposed to risk allocating
6093	   floating-point values in registers that might be wider than
6094	   memory.  */
6095	if ((flag_float_store
6096	     && GET_CODE (dest) == MEM
6097	     && FLOAT_MODE_P (GET_MODE (dest)))
6098	    /* Don't record BLKmode values, because we don't know the
6099	       size of it, and can't be sure that other BLKmode values
6100	       have the same or smaller size.  */
6101	    || GET_MODE (dest) == BLKmode
6102	    /* Don't record values of destinations set inside a libcall block
6103	       since we might delete the libcall.  Things should have been set
6104	       up so we won't want to reuse such a value, but we play it safe
6105	       here.  */
6106	    || libcall_insn
6107	    /* If we didn't put a REG_EQUAL value or a source into the hash
6108	       table, there is no point in recording DEST.  */
6109	    || sets[i].src_elt == 0
6110	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6111	       or SIGN_EXTEND, don't record DEST since it can cause
6112	       some tracking to be wrong.
6113
6114	       ??? Think about this more later.  */
6115	    || (GET_CODE (dest) == SUBREG
6116		&& (GET_MODE_SIZE (GET_MODE (dest))
6117		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6118		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6119		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6120	  continue;
6121
6122	/* STRICT_LOW_PART isn't part of the value BEING set,
6123	   and neither is the SUBREG inside it.
6124	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6125	if (GET_CODE (dest) == STRICT_LOW_PART)
6126	  dest = SUBREG_REG (XEXP (dest, 0));
6127
6128	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6129	  /* Registers must also be inserted into chains for quantities.  */
6130	  if (insert_regs (dest, sets[i].src_elt, 1))
6131	    {
6132	      /* If `insert_regs' changes something, the hash code must be
6133		 recalculated.  */
6134	      rehash_using_reg (dest);
6135	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6136	    }
6137
6138	if (GET_CODE (inner_dest) == MEM
6139	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6140	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6141	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
6142	     Consider the case in which the address of the MEM is
6143	     passed to a function, which alters the MEM.  Then, if we
6144	     later use Y instead of the MEM we'll miss the update.  */
6145	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6146	else
6147	  elt = insert (dest, sets[i].src_elt,
6148			sets[i].dest_hash, GET_MODE (dest));
6149
6150	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6151			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6152			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6153							  0))));
6154
6155	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6156	   narrower than M2, and both M1 and M2 are the same number of words,
6157	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6158	   make that equivalence as well.
6159
6160	   However, BAR may have equivalences for which gen_lowpart_if_possible
6161	   will produce a simpler value than gen_lowpart_if_possible applied to
6162	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6163	   BAR's equivalences.  If we don't get a simplified form, make
6164	   the SUBREG.  It will not be used in an equivalence, but will
6165	   cause two similar assignments to be detected.
6166
6167	   Note the loop below will find SUBREG_REG (DEST) since we have
6168	   already entered SRC and DEST of the SET in the table.  */
6169
6170	if (GET_CODE (dest) == SUBREG
6171	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6172		 / UNITS_PER_WORD)
6173		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6174	    && (GET_MODE_SIZE (GET_MODE (dest))
6175		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6176	    && sets[i].src_elt != 0)
6177	  {
6178	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6179	    struct table_elt *elt, *classp = 0;
6180
6181	    for (elt = sets[i].src_elt->first_same_value; elt;
6182		 elt = elt->next_same_value)
6183	      {
6184		rtx new_src = 0;
6185		unsigned src_hash;
6186		struct table_elt *src_elt;
6187
6188		/* Ignore invalid entries.  */
6189		if (GET_CODE (elt->exp) != REG
6190		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6191		  continue;
6192
6193		new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6194		if (new_src == 0)
6195		  new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6196
6197		src_hash = HASH (new_src, new_mode);
6198		src_elt = lookup (new_src, src_hash, new_mode);
6199
6200		/* Put the new source in the hash table if it isn't
6201		   already.  */
6202		if (src_elt == 0)
6203		  {
6204		    if (insert_regs (new_src, classp, 0))
6205		      {
6206			rehash_using_reg (new_src);
6207			src_hash = HASH (new_src, new_mode);
6208		      }
6209		    src_elt = insert (new_src, classp, src_hash, new_mode);
6210		    src_elt->in_memory = elt->in_memory;
6211		  }
6212		else if (classp && classp != src_elt->first_same_value)
6213		  /* Show that two things that we've seen before are
6214		     actually the same.  */
6215		  merge_equiv_classes (src_elt, classp);
6216
6217		classp = src_elt->first_same_value;
6218		/* Ignore invalid entries.  */
6219		while (classp
6220		       && GET_CODE (classp->exp) != REG
6221		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6222		  classp = classp->next_same_value;
6223	      }
6224	  }
6225      }
6226
6227  /* Special handling for (set REG0 REG1) where REG0 is the
6228     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6229     be used in the sequel, so (if easily done) change this insn to
6230     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6231     that computed their value.  Then REG1 will become a dead store
6232     and won't cloud the situation for later optimizations.
6233
6234     Do not make this change if REG1 is a hard register, because it will
6235     then be used in the sequel and we may be changing a two-operand insn
6236     into a three-operand insn.
6237
6238     Also do not do this if we are operating on a copy of INSN.
6239
6240     Also don't do this if INSN ends a libcall; this would cause an unrelated
6241     register to be set in the middle of a libcall, and we then get bad code
6242     if the libcall is deleted.  */
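
  /* For illustration, a hypothetical before-and-after sketch with
     REG0 = (reg:SI 100) and REG1 = (reg:SI 101):

	(set (reg:SI 101) (plus:SI (reg:SI 102) (const_int 4)))
	(set (reg:SI 100) (reg:SI 101))

     becomes

	(set (reg:SI 100) (plus:SI (reg:SI 102) (const_int 4)))
	(set (reg:SI 101) (reg:SI 100))

     so the copy into REG1 becomes a dead store if REG1 is not used
     later.  */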
6243
6244  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6245      && NEXT_INSN (PREV_INSN (insn)) == insn
6246      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6247      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6248      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6249    {
6250      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6251      struct qty_table_elem *src_ent = &qty_table[src_q];
6252
6253      if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6254	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6255	{
6256	  rtx prev = prev_nonnote_insn (insn);
6257
6258	  /* Do not swap the registers around if the previous instruction
6259	     attaches a REG_EQUIV note to REG1.
6260
6261	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6262	     from the pseudo that originally shadowed an incoming argument
6263	     to another register.  Some uses of REG_EQUIV might rely on it
6264	     being attached to REG1 rather than REG2.
6265
6266	     This section previously turned the REG_EQUIV into a REG_EQUAL
6267	     note.  We cannot do that because REG_EQUIV may provide an
6268	     uninitialised stack slot when REG_PARM_STACK_SPACE is used.  */
6269
6270	  if (prev != 0 && GET_CODE (prev) == INSN
6271	      && GET_CODE (PATTERN (prev)) == SET
6272	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6273	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6274	    {
6275	      rtx dest = SET_DEST (sets[0].rtl);
6276	      rtx src = SET_SRC (sets[0].rtl);
6277	      rtx note;
6278
6279	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6280	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6281	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6282	      apply_change_group ();
6283
6284	      /* If there was a REG_WAS_0 note on PREV, remove it.  Move
6285		 any REG_WAS_0 note on INSN to PREV.  */
6286	      note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6287	      if (note)
6288		remove_note (prev, note);
6289
6290	      note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6291	      if (note)
6292		{
6293		  remove_note (insn, note);
6294		  XEXP (note, 1) = REG_NOTES (prev);
6295		  REG_NOTES (prev) = note;
6296		}
6297
6298	      /* If INSN has a REG_EQUAL note, and this note mentions
6299		 REG0, then we must delete it, because the value in
6300		 REG0 has changed.  If the note's value is REG1, we must
6301		 also delete it because that is now this insn's dest.  */
6302	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6303	      if (note != 0
6304		  && (reg_mentioned_p (dest, XEXP (note, 0))
6305		      || rtx_equal_p (src, XEXP (note, 0))))
6306		remove_note (insn, note);
6307	    }
6308	}
6309    }
6310
6311  /* If this is a conditional jump insn, record any known equivalences due to
6312     the condition being tested.  */
6313
6314  last_jump_equiv_class = 0;
6315  if (GET_CODE (insn) == JUMP_INSN
6316      && n_sets == 1 && GET_CODE (x) == SET
6317      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6318    record_jump_equiv (insn, 0);
6319
6320#ifdef HAVE_cc0
6321  /* If the previous insn set CC0 and this insn no longer references CC0,
6322     delete the previous insn.  Here we use the fact that nothing expects CC0
6323     to be valid over an insn, which is true until the final pass.  */
6324  if (prev_insn && GET_CODE (prev_insn) == INSN
6325      && (tem = single_set (prev_insn)) != 0
6326      && SET_DEST (tem) == cc0_rtx
6327      && ! reg_mentioned_p (cc0_rtx, x))
6328    delete_insn (prev_insn);
6329
6330  prev_insn_cc0 = this_insn_cc0;
6331  prev_insn_cc0_mode = this_insn_cc0_mode;
6332#endif
6333
6334  prev_insn = insn;
6335}
6336
6337/* Remove from the hash table all expressions that reference memory.  */
6338
6339static void
6340invalidate_memory ()
6341{
6342  int i;
6343  struct table_elt *p, *next;
6344
6345  for (i = 0; i < HASH_SIZE; i++)
6346    for (p = table[i]; p; p = next)
6347      {
6348	next = p->next_same_hash;
6349	if (p->in_memory)
6350	  remove_from_table (p, i);
6351      }
6352}
6353
6354/* If ADDR is an address that implicitly affects the stack pointer, return
6355   1 and update the register tables to show the effect.  Else, return 0.  */
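
/* For example (hypothetical), the address (pre_dec:SI (reg:SI sp))
   taken from a push such as

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 100))

   is of rtx class 'a' and implicitly decrements the stack pointer,
   so passing it here returns 1.  */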
6356
6357static int
6358addr_affects_sp_p (addr)
6359     rtx addr;
6360{
6361  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6362      && GET_CODE (XEXP (addr, 0)) == REG
6363      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6364    {
6365      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6366	REG_TICK (STACK_POINTER_REGNUM)++;
6367
6368      /* This should be *very* rare.  */
6369      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6370	invalidate (stack_pointer_rtx, VOIDmode);
6371
6372      return 1;
6373    }
6374
6375  return 0;
6376}
6377
6378/* Perform invalidation on the basis of everything about an insn
6379   except for invalidating the actual places that are SET in it.
6380   This includes the places CLOBBERed, and anything that might
6381   alias with something that is SET or CLOBBERed.
6382
6383   X is the pattern of the insn.  */
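
/* For example (hypothetical), given the pattern

     (parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
		(clobber (reg:CC 17))])

   we invalidate anything known about register 17; the SET itself is
   handled elsewhere.  */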
6384
6385static void
6386invalidate_from_clobbers (x)
6387     rtx x;
6388{
6389  if (GET_CODE (x) == CLOBBER)
6390    {
6391      rtx ref = XEXP (x, 0);
6392      if (ref)
6393	{
6394	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6395	      || GET_CODE (ref) == MEM)
6396	    invalidate (ref, VOIDmode);
6397	  else if (GET_CODE (ref) == STRICT_LOW_PART
6398		   || GET_CODE (ref) == ZERO_EXTRACT)
6399	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6400	}
6401    }
6402  else if (GET_CODE (x) == PARALLEL)
6403    {
6404      int i;
6405      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6406	{
6407	  rtx y = XVECEXP (x, 0, i);
6408	  if (GET_CODE (y) == CLOBBER)
6409	    {
6410	      rtx ref = XEXP (y, 0);
6411	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6412		  || GET_CODE (ref) == MEM)
6413		invalidate (ref, VOIDmode);
6414	      else if (GET_CODE (ref) == STRICT_LOW_PART
6415		       || GET_CODE (ref) == ZERO_EXTRACT)
6416		invalidate (XEXP (ref, 0), GET_MODE (ref));
6417	    }
6418	}
6419    }
6420}
6421
6422/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6423   and replace any registers in them with either an equivalent constant
6424   or the canonical form of the register.  If we are inside an address,
6425   only do this if the address remains valid.
6426
6427   OBJECT is 0 except when within a MEM in which case it is the MEM.
6428
6429   Return the replacement for X.  */
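
/* For instance (a hypothetical note), if (reg:SI 100) is known to be
   equivalent to (const_int 4), then in

     (expr_list:REG_EQUAL (plus:SI (reg:SI 100) (reg:SI 101)) ...)

   the constant is substituted for the register, putting the note into
   a canonical form for later duplicate detection.  */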
6430
6431static rtx
6432cse_process_notes (x, object)
6433     rtx x;
6434     rtx object;
6435{
6436  enum rtx_code code = GET_CODE (x);
6437  const char *fmt = GET_RTX_FORMAT (code);
6438  int i;
6439
6440  switch (code)
6441    {
6442    case CONST_INT:
6443    case CONST:
6444    case SYMBOL_REF:
6445    case LABEL_REF:
6446    case CONST_DOUBLE:
6447    case CONST_VECTOR:
6448    case PC:
6449    case CC0:
6450    case LO_SUM:
6451      return x;
6452
6453    case MEM:
6454      validate_change (x, &XEXP (x, 0),
6455		       cse_process_notes (XEXP (x, 0), x), 0);
6456      return x;
6457
6458    case EXPR_LIST:
6459    case INSN_LIST:
6460      if (REG_NOTE_KIND (x) == REG_EQUAL)
6461	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6462      if (XEXP (x, 1))
6463	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6464      return x;
6465
6466    case SIGN_EXTEND:
6467    case ZERO_EXTEND:
6468    case SUBREG:
6469      {
6470	rtx new = cse_process_notes (XEXP (x, 0), object);
6471	/* We don't substitute VOIDmode constants into these rtx,
6472	   since they would impede folding.  */
6473	if (GET_MODE (new) != VOIDmode)
6474	  validate_change (object, &XEXP (x, 0), new, 0);
6475	return x;
6476      }
6477
6478    case REG:
6479      i = REG_QTY (REGNO (x));
6480
6481      /* Return a constant or a constant register.  */
6482      if (REGNO_QTY_VALID_P (REGNO (x)))
6483	{
6484	  struct qty_table_elem *ent = &qty_table[i];
6485
6486	  if (ent->const_rtx != NULL_RTX
6487	      && (CONSTANT_P (ent->const_rtx)
6488		  || GET_CODE (ent->const_rtx) == REG))
6489	    {
6490	      rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6491	      if (new)
6492		return new;
6493	    }
6494	}
6495
6496      /* Otherwise, canonicalize this register.  */
6497      return canon_reg (x, NULL_RTX);
6498
6499    default:
6500      break;
6501    }
6502
6503  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6504    if (fmt[i] == 'e')
6505      validate_change (object, &XEXP (x, i),
6506		       cse_process_notes (XEXP (x, i), object), 0);
6507
6508  return x;
6509}
6510
6511/* Find common subexpressions between the end test of a loop and the beginning
6512   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
6513
6514   Often we have a loop where an expression in the exit test is used
6515   in the body of the loop.  For example "while (*p) *q++ = *p++;".
6516   Because of the way we duplicate the loop exit test in front of the loop,
6517   however, we don't detect that common subexpression.  This will be caught
6518   when global cse is implemented, but this is quite a common case.
6519
6520   This function handles the most common cases of these common expressions.
6521   It is called after we have processed the basic block ending with the
6522   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6523   jumps to a label used only once.  */
6524
6525static void
6526cse_around_loop (loop_start)
6527     rtx loop_start;
6528{
6529  rtx insn;
6530  int i;
6531  struct table_elt *p;
6532
6533  /* If the jump at the end of the loop doesn't go to the start, we don't
6534     do anything.  */
6535  for (insn = PREV_INSN (loop_start);
6536       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6537       insn = PREV_INSN (insn))
6538    ;
6539
6540  if (insn == 0
6541      || GET_CODE (insn) != NOTE
6542      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6543    return;
6544
6545  /* If the last insn of the loop (the end test) was an NE comparison,
6546     we will interpret it as an EQ comparison, since we fell through
6547     the loop.  Any equivalences resulting from that comparison are
6548     therefore not valid and must be invalidated.  */
6549  if (last_jump_equiv_class)
6550    for (p = last_jump_equiv_class->first_same_value; p;
6551	 p = p->next_same_value)
6552      {
6553	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6554	    || (GET_CODE (p->exp) == SUBREG
6555		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
6556	  invalidate (p->exp, VOIDmode);
6557	else if (GET_CODE (p->exp) == STRICT_LOW_PART
6558		 || GET_CODE (p->exp) == ZERO_EXTRACT)
6559	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6560      }
6561
6562  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6563     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6564
6565     The only thing we do with SET_DEST is invalidate entries, so we
6566     can safely process each SET in order.  It is slightly less efficient
6567     to do so, but we only want to handle the most common cases.
6568
6569     The gen_move_insn call in cse_set_around_loop may create new pseudos.
6570     These pseudos won't have valid entries in any of the tables indexed
6571     by register number, such as reg_qty.  We avoid out-of-range array
6572     accesses by not processing any instructions created after cse started.  */
6573
6574  for (insn = NEXT_INSN (loop_start);
6575       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6576       && INSN_UID (insn) < max_insn_uid
6577       && ! (GET_CODE (insn) == NOTE
6578	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6579       insn = NEXT_INSN (insn))
6580    {
6581      if (INSN_P (insn)
6582	  && (GET_CODE (PATTERN (insn)) == SET
6583	      || GET_CODE (PATTERN (insn)) == CLOBBER))
6584	cse_set_around_loop (PATTERN (insn), insn, loop_start);
6585      else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6586	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6587	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6588	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6589	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6590				 loop_start);
6591    }
6592}
6593
6594/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6595   since they are done elsewhere.  This function is called via note_stores.  */
6596
6597static void
6598invalidate_skipped_set (dest, set, data)
6599     rtx dest;
6600     rtx set;
6601     void *data ATTRIBUTE_UNUSED;
6602{
6603  enum rtx_code code = GET_CODE (dest);
6604
6605  if (code == MEM
6606      && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6607      /* There are times when an address can appear varying and be a PLUS
6608	 during this scan when it would be a fixed address were we to know
6609	 the proper equivalences.  So invalidate all memory if there is
6610	 a BLKmode or nonscalar memory reference or a reference to a
6611	 variable address.  */
6612      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6613	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6614    {
6615      invalidate_memory ();
6616      return;
6617    }
6618
6619  if (GET_CODE (set) == CLOBBER
6620#ifdef HAVE_cc0
6621      || dest == cc0_rtx
6622#endif
6623      || dest == pc_rtx)
6624    return;
6625
6626  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6627    invalidate (XEXP (dest, 0), GET_MODE (dest));
6628  else if (code == REG || code == SUBREG || code == MEM)
6629    invalidate (dest, VOIDmode);
6630}
6631
6632/* Invalidate all insns from START up to the end of the function or the
6633   next label.  This is called when we wish to CSE around a block that is
6634   conditionally executed.  */
6635
6636static void
6637invalidate_skipped_block (start)
6638     rtx start;
6639{
6640  rtx insn;
6641
6642  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6643       insn = NEXT_INSN (insn))
6644    {
6645      if (! INSN_P (insn))
6646	continue;
6647
6648      if (GET_CODE (insn) == CALL_INSN)
6649	{
6650	  if (! CONST_OR_PURE_CALL_P (insn))
6651	    invalidate_memory ();
6652	  invalidate_for_call ();
6653	}
6654
6655      invalidate_from_clobbers (PATTERN (insn));
6656      note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6657    }
6658}
6659
6660/* If modifying X will modify the value in *DATA (which is really an
6661   `rtx *'), indicate that fact by setting the pointed to value to
6662   NULL_RTX.  */
6663
6664static void
6665cse_check_loop_start (x, set, data)
6666     rtx x;
6667     rtx set ATTRIBUTE_UNUSED;
6668     void *data;
6669{
6670  rtx *cse_check_loop_start_value = (rtx *) data;
6671
6672  if (*cse_check_loop_start_value == NULL_RTX
6673      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6674    return;
6675
6676  if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6677      || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6678    *cse_check_loop_start_value = NULL_RTX;
6679}
6680
6681/* X is a SET or CLOBBER contained in INSN that was found near the start of
6682   a loop that starts with the label at LOOP_START.
6683
6684   If X is a SET, we see if its SET_SRC is currently in our hash table.
6685   If so, we see if it has a value equal to some register used only in the
6686   loop exit code (as marked by jump.c).
6687
6688   If those two conditions are true, we search backwards from the start of
6689   the loop to see if that same value was loaded into a register that still
6690   retains its value at the start of the loop.
6691
6692   If so, we insert an insn after the load to copy the destination of that
6693   load into the equivalent register and (try to) replace our SET_SRC with that
6694   register.
6695
6696   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
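
/* A hypothetical sketch of the transformation, where (reg:SI 80) is a
   cheap register used only in the loop exit code:

     before loop:  (set (reg:SI 50) (mem:SI (reg:SI 60)))
     loop start:   (code_label 10)
     in loop:      (set (reg:SI 70) (mem:SI (reg:SI 60)))

   If the MEM is known equivalent to (reg:SI 80), we emit
   (set (reg:SI 80) (reg:SI 50)) after the load before the loop and
   replace the SET_SRC inside the loop with (reg:SI 80).  */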
6697
6698static void
6699cse_set_around_loop (x, insn, loop_start)
6700     rtx x;
6701     rtx insn;
6702     rtx loop_start;
6703{
6704  struct table_elt *src_elt;
6705
6706  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6707     are setting PC or CC0 or whose SET_SRC is already a register.  */
6708  if (GET_CODE (x) == SET
6709      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6710      && GET_CODE (SET_SRC (x)) != REG)
6711    {
6712      src_elt = lookup (SET_SRC (x),
6713			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6714			GET_MODE (SET_DEST (x)));
6715
6716      if (src_elt)
6717	for (src_elt = src_elt->first_same_value; src_elt;
6718	     src_elt = src_elt->next_same_value)
6719	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6720	      && COST (src_elt->exp) < COST (SET_SRC (x)))
6721	    {
6722	      rtx p, set;
6723
6724	      /* Look for an insn in front of LOOP_START that sets
6725		 something in the desired mode to SET_SRC (x) before we hit
6726		 a label or CALL_INSN.  */
6727
6728	      for (p = prev_nonnote_insn (loop_start);
6729		   p && GET_CODE (p) != CALL_INSN
6730		   && GET_CODE (p) != CODE_LABEL;
6731		   p = prev_nonnote_insn (p))
6732		if ((set = single_set (p)) != 0
6733		    && GET_CODE (SET_DEST (set)) == REG
6734		    && GET_MODE (SET_DEST (set)) == src_elt->mode
6735		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6736		  {
6737		    /* We now have to ensure that nothing between P
6738		       and LOOP_START modified anything referenced in
6739		       SET_SRC (x).  We know that nothing within the loop
6740		       can modify it, or we would have invalidated it in
6741		       the hash table.  */
6742		    rtx q;
6743		    rtx cse_check_loop_start_value = SET_SRC (x);
6744		    for (q = p; q != loop_start; q = NEXT_INSN (q))
6745		      if (INSN_P (q))
6746			note_stores (PATTERN (q),
6747				     cse_check_loop_start,
6748				     &cse_check_loop_start_value);
6749
6750		    /* If nothing was changed and we can replace our
6751		       SET_SRC, add an insn after P to copy its destination
6752		       to what we will be replacing SET_SRC with.  */
6753		    if (cse_check_loop_start_value
6754			&& validate_change (insn, &SET_SRC (x),
6755					    src_elt->exp, 0))
6756		      {
6757			/* If this creates new pseudos, this is unsafe,
6758			   because the regno of new pseudo is unsuitable
6759			   to index into reg_qty when cse_insn processes
6760			   the new insn.  Therefore, if a new pseudo was
6761			   created, discard this optimization.  */
6762			int nregs = max_reg_num ();
6763			rtx move
6764			  = gen_move_insn (src_elt->exp, SET_DEST (set));
6765			if (nregs != max_reg_num ())
6766			  {
6767			    if (! validate_change (insn, &SET_SRC (x),
6768						   SET_SRC (set), 0))
6769			      abort ();
6770			  }
6771			else
6772			  emit_insn_after (move, p);
6773		      }
6774		    break;
6775		  }
6776	    }
6777    }
6778
6779  /* Deal with the destination of X affecting the stack pointer.  */
6780  addr_affects_sp_p (SET_DEST (x));
6781
6782  /* See comment on similar code in cse_insn for explanation of these
6783     tests.  */
6784  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6785      || GET_CODE (SET_DEST (x)) == MEM)
6786    invalidate (SET_DEST (x), VOIDmode);
6787  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6788	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6789    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6790}
6791
6792/* Find the end of INSN's basic block and return its range,
6793   the total number of SETs in all the insns of the block, the last insn of the
6794   block, and the branch path.
6795
6796   The branch path indicates which branches should be followed.  If a non-zero
6797   path size is specified, the block should be rescanned and a different set
6798   of branches will be taken.  The branch path is only used if
6799   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6800
6801   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6802   used to describe the block.  It is filled in with the information about
6803   the current block.  The incoming structure's branch path, if any, is used
6804   to construct the output branch path.  */
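
/* A hypothetical sketch of a recorded path: after following one
   conditional jump and skipping over one block, we would have

     data->path[0].branch = <jump_insn A>, status = TAKEN;
     data->path[1].branch = <jump_insn B>, status = AROUND;
     data->path_size = 2;

   On a rescan, the last entry not already NOT_TAKEN is flipped to
   NOT_TAKEN so that a different set of branches is explored.  */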
6805
6806void
6807cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6808     rtx insn;
6809     struct cse_basic_block_data *data;
6810     int follow_jumps;
6811     int after_loop;
6812     int skip_blocks;
6813{
6814  rtx p = insn, q;
6815  int nsets = 0;
6816  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6817  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6818  int path_size = data->path_size;
6819  int path_entry = 0;
6820  int i;
6821
6822  /* Update the previous branch path, if any.  If the last branch was
6823     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
6824     shorten the path by one and look at the previous branch.  We know that
6825     at least one branch must have been taken if PATH_SIZE is non-zero.  */
6826  while (path_size > 0)
6827    {
6828      if (data->path[path_size - 1].status != NOT_TAKEN)
6829	{
6830	  data->path[path_size - 1].status = NOT_TAKEN;
6831	  break;
6832	}
6833      else
6834	path_size--;
6835    }
6836
6837  /* If the first instruction is marked with QImode, that means we've
6838     already processed this block.  Our caller will look at DATA->LAST
6839     to figure out where to go next.  We want to return the next block
6840     in the instruction stream, not some branched-to block somewhere
6841     else.  We accomplish this by pretending our caller forbade us to
6842     follow jumps or skip blocks.  */
6843  if (GET_MODE (insn) == QImode)
6844    follow_jumps = skip_blocks = 0;
6845
6846  /* Scan to end of this basic block.  */
6847  while (p && GET_CODE (p) != CODE_LABEL)
6848    {
6849      /* Don't cse out the end of a loop.  This makes a difference
6850	 only for the unusual loops that always execute at least once;
6851	 all other loops have labels there so we will stop in any case.
6852	 Cse'ing out the end of the loop is dangerous because it
6853	 might cause an invariant expression inside the loop
6854	 to be reused after the end of the loop.  This would make it
6855	 hard to move the expression out of the loop in loop.c,
6856	 especially if it is one of several equivalent expressions
6857	 and loop.c would like to eliminate it.
6858
6859	 If we are running after loop.c has finished, we can ignore
6860	 the NOTE_INSN_LOOP_END.  */
6861
6862      if (! after_loop && GET_CODE (p) == NOTE
6863	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6864	break;
6865
6866      /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
6867	 the regs restored by the longjmp come from
6868	 a later time than the setjmp.  */
6869      if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6870	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6871	break;
6872
6873      /* A PARALLEL can have lots of SETs in it,
6874	 especially if it is really an ASM_OPERANDS.  */
6875      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6876	nsets += XVECLEN (PATTERN (p), 0);
6877      else if (GET_CODE (p) != NOTE)
6878	nsets += 1;
6879
6880      /* Ignore insns made by CSE; they cannot affect the boundaries of
6881	 the basic block.  */
6882
6883      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6884	high_cuid = INSN_CUID (p);
6885      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6886	low_cuid = INSN_CUID (p);
6887
6888      /* See if this insn is in our branch path.  If it is and we are to
6889	 take it, do so.  */
6890      if (path_entry < path_size && data->path[path_entry].branch == p)
6891	{
6892	  if (data->path[path_entry].status != NOT_TAKEN)
6893	    p = JUMP_LABEL (p);
6894
6895	  /* Point to next entry in path, if any.  */
6896	  path_entry++;
6897	}
6898
6899      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6900	 was specified, we haven't reached our maximum path length, there are
6901	 insns following the target of the jump, this is the only use of the
6902	 jump label, and the target label is preceded by a BARRIER.
6903
6904	 Alternatively, we can follow the jump if it branches around a
6905	 block of code and there are no other branches into the block.
6906	 In this case invalidate_skipped_block will be called to invalidate any
6907	 registers set in the block when following the jump.  */
6908
6909      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6910	       && GET_CODE (p) == JUMP_INSN
6911	       && GET_CODE (PATTERN (p)) == SET
6912	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6913	       && JUMP_LABEL (p) != 0
6914	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6915	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6916	{
6917	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6918	    if ((GET_CODE (q) != NOTE
6919		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6920		 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6921		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6922		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6923	      break;
6924
6925	  /* If we ran into a BARRIER, this code is an extension of the
6926	     basic block when the branch is taken.  */
6927	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6928	    {
6929	      /* Don't allow ourselves to keep walking around an
6930		 always-executed loop.  */
6931	      if (next_real_insn (q) == next)
6932		{
6933		  p = NEXT_INSN (p);
6934		  continue;
6935		}
6936
6937	      /* Similarly, don't put a branch in our path more than once.  */
6938	      for (i = 0; i < path_entry; i++)
6939		if (data->path[i].branch == p)
6940		  break;
6941
6942	      if (i != path_entry)
6943		break;
6944
6945	      data->path[path_entry].branch = p;
6946	      data->path[path_entry++].status = TAKEN;
6947
6948	      /* This branch now ends our path.  It was possible that we
6949		 didn't see this branch the last time around (when the
6950		 insn in front of the target was a JUMP_INSN that was
6951		 turned into a no-op).  */
6952	      path_size = path_entry;
6953
6954	      p = JUMP_LABEL (p);
6955	      /* Mark block so we won't scan it again later.  */
6956	      PUT_MODE (NEXT_INSN (p), QImode);
6957	    }
6958	  /* Detect a branch around a block of code.  */
6959	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6960	    {
6961	      rtx tmp;
6962
6963	      if (next_real_insn (q) == next)
6964		{
6965		  p = NEXT_INSN (p);
6966		  continue;
6967		}
6968
6969	      for (i = 0; i < path_entry; i++)
6970		if (data->path[i].branch == p)
6971		  break;
6972
6973	      if (i != path_entry)
6974		break;
6975
6976	      /* This is no_labels_between_p (p, q) with an added check for
6977		 reaching the end of a function (in case Q precedes P).  */
6978	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6979		if (GET_CODE (tmp) == CODE_LABEL)
6980		  break;
6981
6982	      if (tmp == q)
6983		{
6984		  data->path[path_entry].branch = p;
6985		  data->path[path_entry++].status = AROUND;
6986
6987		  path_size = path_entry;
6988
6989		  p = JUMP_LABEL (p);
6990		  /* Mark block so we won't scan it again later.  */
6991		  PUT_MODE (NEXT_INSN (p), QImode);
6992		}
6993	    }
6994	}
6995      p = NEXT_INSN (p);
6996    }
6997
6998  data->low_cuid = low_cuid;
6999  data->high_cuid = high_cuid;
7000  data->nsets = nsets;
7001  data->last = p;
7002
7003  /* If none of the jumps in the path were taken, set our path length
7004     to zero so a rescan won't be done.  */
7005  for (i = path_size - 1; i >= 0; i--)
7006    if (data->path[i].status != NOT_TAKEN)
7007      break;
7008
7009  if (i == -1)
7010    data->path_size = 0;
7011  else
7012    data->path_size = path_size;
7013
7014  /* End the current branch path.  */
7015  data->path[path_size].branch = 0;
7016}
7017
7018/* Perform cse on the instructions of a function.
7019   F is the first instruction.
7020   NREGS is one plus the highest pseudo-reg number used in the function.
7021
7022   AFTER_LOOP is 1 if this is the cse call done after loop optimization
7023   (only if -frerun-cse-after-loop).
7024
7025   Returns 1 if jump_optimize should be redone due to simplifications
7026   in conditional jump instructions.  */
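
/* A sketch of a typical invocation (hypothetical; the caller's names
   are assumptions, not part of this file):

     int tem = cse_main (get_insns (), max_reg_num (), 0, rtl_dump_file);

   A non-zero return tells the caller to rerun the jump optimizer.  */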
7027
7028int
7029cse_main (f, nregs, after_loop, file)
7030     rtx f;
7031     int nregs;
7032     int after_loop;
7033     FILE *file;
7034{
7035  struct cse_basic_block_data val;
7036  rtx insn = f;
7037  int i;
7038
7039  cse_jumps_altered = 0;
7040  recorded_label_ref = 0;
7041  constant_pool_entries_cost = 0;
7042  val.path_size = 0;
7043
7044  init_recog ();
7045  init_alias_analysis ();
7046
7047  max_reg = nregs;
7048
7049  max_insn_uid = get_max_uid ();
7050
7051  reg_eqv_table = (struct reg_eqv_elem *)
7052    xmalloc (nregs * sizeof (struct reg_eqv_elem));
7053
7054#ifdef LOAD_EXTEND_OP
7055
7056  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
7057     and change the code and mode as appropriate.  */
7058  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7059#endif
7060
7061  /* Reset the counter indicating how many elements have been made
7062     thus far.  */
7063  n_elements_made = 0;
7064
7065  /* Find the largest uid.  */
7066
7067  max_uid = get_max_uid ();
7068  uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7069
7070  /* Compute the mapping from uids to cuids.
7071     CUIDs are numbers assigned to insns, like uids,
7072     except that cuids increase monotonically through the code.
7073     Don't assign cuids to line-number NOTEs, so that the distance in cuids
7074     between two insns is not affected by -g.  */
7075
7076  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7077    {
7078      if (GET_CODE (insn) != NOTE
7079	  || NOTE_LINE_NUMBER (insn) < 0)
7080	INSN_CUID (insn) = ++i;
7081      else
7082	/* Give a line number note the same cuid as preceding insn.  */
7083	INSN_CUID (insn) = i;
7084    }
7085
7086  ggc_push_context ();
7087
7088  /* Loop over basic blocks.
7089     Compute the maximum number of qty's needed for each basic block
7090     (which is 2 for each SET).  */
7091  insn = f;
7092  while (insn)
7093    {
7094      cse_altered = 0;
7095      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7096			      flag_cse_skip_blocks);
7097
7098      /* If this basic block was already processed or has no sets, skip it.  */
7099      if (val.nsets == 0 || GET_MODE (insn) == QImode)
7100	{
7101	  PUT_MODE (insn, VOIDmode);
7102	  insn = (val.last ? NEXT_INSN (val.last) : 0);
7103	  val.path_size = 0;
7104	  continue;
7105	}
7106
7107      cse_basic_block_start = val.low_cuid;
7108      cse_basic_block_end = val.high_cuid;
7109      max_qty = val.nsets * 2;
7110
7111      if (file)
7112	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7113		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7114		 val.nsets);
7115
7116      /* Make MAX_QTY bigger to give us room to optimize
7117	 past the end of this basic block, if that should prove useful.  */
7118      if (max_qty < 500)
7119	max_qty = 500;
7120
7121      max_qty += max_reg;
7122
7123      /* If this basic block is being extended by following certain jumps
7124         (see `cse_end_of_basic_block'), we reprocess the code from the start.
7125         Otherwise, we start after this basic block.  */
7126      if (val.path_size > 0)
7127	cse_basic_block (insn, val.last, val.path, 0);
7128      else
7129	{
7130	  int old_cse_jumps_altered = cse_jumps_altered;
7131	  rtx temp;
7132
7133	  /* When cse changes a conditional jump to an unconditional
7134	     jump, we want to reprocess the block, since it will give
7135	     us a new branch path to investigate.  */
7136	  cse_jumps_altered = 0;
7137	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7138	  if (cse_jumps_altered == 0
7139	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7140	    insn = temp;
7141
7142	  cse_jumps_altered |= old_cse_jumps_altered;
7143	}
7144
7145      if (cse_altered)
7146	ggc_collect ();
7147
7148#ifdef USE_C_ALLOCA
7149      alloca (0);
7150#endif
7151    }
7152
7153  ggc_pop_context ();
7154
7155  if (max_elements_made < n_elements_made)
7156    max_elements_made = n_elements_made;
7157
7158  /* Clean up.  */
7159  end_alias_analysis ();
7160  free (uid_cuid);
7161  free (reg_eqv_table);
7162
7163  return cse_jumps_altered || recorded_label_ref;
7164}
7165
7166/* Process a single basic block.  FROM and TO are the limits of the basic
7167   block.  NEXT_BRANCH points to the branch path when following jumps or
7168   a null path when not following jumps.
7169
7170   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7171   loop.  This is true when we are being called for the last time on a
7172   block and this CSE pass is before loop.c.  */
7173
7174static rtx
7175cse_basic_block (from, to, next_branch, around_loop)
7176     rtx from, to;
7177     struct branch_path *next_branch;
7178     int around_loop;
7179{
7180  rtx insn;
7181  int to_usage = 0;
7182  rtx libcall_insn = NULL_RTX;
7183  int num_insns = 0;
7184
7185  /* This array is undefined before max_reg, so only allocate
7186     the space actually needed and adjust the start.  */
7187
7188  qty_table
7189    = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7190					 * sizeof (struct qty_table_elem));
7191  qty_table -= max_reg;
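
  /* After the adjustment above, qty_table[q] is valid exactly for
     q in the range [max_reg, max_qty); e.g. if max_reg is 100, the
     first allocated element is addressed as qty_table[100].  */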
7192
7193  new_basic_block ();
7194
7195  /* TO might be a label.  If so, protect it from being deleted.  */
7196  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7197    ++LABEL_NUSES (to);
7198
7199  for (insn = from; insn != to; insn = NEXT_INSN (insn))
7200    {
7201      enum rtx_code code = GET_CODE (insn);
7202
7203      /* If we have processed 1,000 insns, flush the hash table to
7204	 avoid extreme quadratic behavior.  We must not include NOTEs
7205	 in the count since there may be more of them when generating
7206	 debugging information.  If we clear the table at different
7207	 times, code generated with -g -O might be different than code
7208	 generated with -O but not -g.
7209
7210	 ??? This is a real kludge and needs to be done some other way.
7211	 Perhaps for 2.9.  */
7212      if (code != NOTE && num_insns++ > 1000)
7213	{
7214	  flush_hash_table ();
7215	  num_insns = 0;
7216	}
7217
7218      /* See if this is a branch that is part of the path.  If so, and it is
7219	 to be taken, do so.  */
7220      if (next_branch->branch == insn)
7221	{
7222	  enum taken status = next_branch++->status;
7223	  if (status != NOT_TAKEN)
7224	    {
7225	      if (status == TAKEN)
7226		record_jump_equiv (insn, 1);
7227	      else
7228		invalidate_skipped_block (NEXT_INSN (insn));
7229
7230	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7231		 Then follow this branch.  */
7232#ifdef HAVE_cc0
7233	      prev_insn_cc0 = 0;
7234#endif
7235	      prev_insn = insn;
7236	      insn = JUMP_LABEL (insn);
7237	      continue;
7238	    }
7239	}
7240
7241      if (GET_MODE (insn) == QImode)
7242	PUT_MODE (insn, VOIDmode);
7243
7244      if (GET_RTX_CLASS (code) == 'i')
7245	{
7246	  rtx p;
7247
7248	  /* Process notes first so we have all notes in canonical forms when
7249	     looking for duplicate operations.  */
7250
7251	  if (REG_NOTES (insn))
7252	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7253
7254	  /* Track when we are inside a LIBCALL block.  Inside such a block,
7255	     we do not want to record destinations.  The last insn of a
7256	     LIBCALL block is not considered to be part of the block, since
7257	     its destination is the result of the block and hence should be
7258	     recorded.  */
7259
7260	  if (REG_NOTES (insn) != 0)
7261	    {
7262	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7263		libcall_insn = XEXP (p, 0);
7264	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7265		libcall_insn = 0;
7266	    }
7267
7268	  cse_insn (insn, libcall_insn);
7269
7270	  /* If we haven't already found an insn where we added a LABEL_REF,
7271	     check this one.  */
7272	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
7273	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7274			       (void *) insn))
7275	    recorded_label_ref = 1;
7276	}
7277
7278      /* If INSN is now an unconditional jump, skip to the end of our
7279	 basic block by pretending that we just did the last insn in the
7280	 basic block.  If we are jumping to the end of our block, show
7281	 that we can have one usage of TO.  */
7282
7283      if (any_uncondjump_p (insn))
7284	{
7285	  if (to == 0)
7286	    {
7287	      free (qty_table + max_reg);
7288	      return 0;
7289	    }
7290
7291	  if (JUMP_LABEL (insn) == to)
7292	    to_usage = 1;
7293
7294	  /* Maybe TO was deleted because the jump is unconditional.
7295	     If so, there is nothing left in this basic block.  */
7296	  /* ??? Perhaps it would be smarter to set TO
7297	     to whatever follows this insn,
7298	     and pretend the basic block had always ended here.  */
7299	  if (INSN_DELETED_P (to))
7300	    break;
7301
7302	  insn = PREV_INSN (to);
7303	}
7304
7305      /* See if it is ok to keep on going past the label
7306	 which used to end our basic block.  Remember that we incremented
7307	 the count of that label, so we decrement it here.  If we made
7308	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7309	 want to count the use in that jump.  */
7310
7311      if (to != 0 && NEXT_INSN (insn) == to
7312	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7313	{
7314	  struct cse_basic_block_data val;
7315	  rtx prev;
7316
7317	  insn = NEXT_INSN (to);
7318
7319	  /* If TO was the last insn in the function, we are done.  */
7320	  if (insn == 0)
7321	    {
7322	      free (qty_table + max_reg);
7323	      return 0;
7324	    }
7325
7326	  /* If TO was preceded by a BARRIER we are done with this block
7327	     because it has no continuation.  */
7328	  prev = prev_nonnote_insn (to);
7329	  if (prev && GET_CODE (prev) == BARRIER)
7330	    {
7331	      free (qty_table + max_reg);
7332	      return insn;
7333	    }
7334
7335	  /* Find the end of the following block.  Note that we won't be
7336	     following branches in this case.  */
7337	  to_usage = 0;
7338	  val.path_size = 0;
7339	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
7340
7341	  /* If the tables we allocated have enough space left
7342	     to handle all the SETs in the next basic block,
7343	     continue through it.  Otherwise, return,
7344	     and that block will be scanned individually.  */
7345	  if (val.nsets * 2 + next_qty > max_qty)
7346	    break;
7347
7348	  cse_basic_block_start = val.low_cuid;
7349	  cse_basic_block_end = val.high_cuid;
7350	  to = val.last;
7351
7352	  /* Prevent TO from being deleted if it is a label.  */
7353	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7354	    ++LABEL_NUSES (to);
7355
7356	  /* Back up so we process the first insn in the extension.  */
7357	  insn = PREV_INSN (insn);
7358	}
7359    }
7360
7361  if (next_qty > max_qty)
7362    abort ();
7363
7364  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7365     the previous insn is the only insn that branches to the head of a loop,
7366     we can cse into the loop.  Don't do this if we changed the jump
7367     structure of a loop unless we aren't going to be following jumps.  */
7368
7369  insn = prev_nonnote_insn (to);
7370  if ((cse_jumps_altered == 0
7371       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7372      && around_loop && to != 0
7373      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7374      && GET_CODE (insn) == JUMP_INSN
7375      && JUMP_LABEL (insn) != 0
7376      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7377    cse_around_loop (JUMP_LABEL (insn));
7378
7379  free (qty_table + max_reg);
7380
7381  return to ? NEXT_INSN (to) : 0;
7382}
7383
7384/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7385   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7386
7387static int
7388check_for_label_ref (rtl, data)
7389     rtx *rtl;
7390     void *data;
7391{
7392  rtx insn = (rtx) data;
7393
7394  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7395     we must rerun jump since it needs to place the note.  If this is a
7396     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7397     since no REG_LABEL will be added.  */
7398  return (GET_CODE (*rtl) == LABEL_REF
7399	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7400	  && LABEL_P (XEXP (*rtl, 0))
7401	  && INSN_UID (XEXP (*rtl, 0)) != 0
7402	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7403}
7404
7405/* Count the number of times registers are used (not set) in X.
7406   COUNTS is an array in which we accumulate the count, INCR is how much
7407   we count each register usage.
7408
7409   Don't count a usage of DEST, which is the SET_DEST of a SET which
7410   contains X in its SET_SRC.  This is because such a SET does not
7411   modify the liveness of DEST.  */
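
/* For example (hypothetical), when counting

     (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   the use of (reg:SI 100) inside the source is not counted, because
   deleting this insn would not change whether register 100 is live.  */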
7412
7413static void
7414count_reg_usage (x, counts, dest, incr)
7415     rtx x;
7416     int *counts;
7417     rtx dest;
7418     int incr;
7419{
7420  enum rtx_code code;
7421  const char *fmt;
7422  int i, j;
7423
7424  if (x == 0)
7425    return;
7426
7427  switch (code = GET_CODE (x))
7428    {
7429    case REG:
7430      if (x != dest)
7431	counts[REGNO (x)] += incr;
7432      return;
7433
7434    case PC:
7435    case CC0:
7436    case CONST:
7437    case CONST_INT:
7438    case CONST_DOUBLE:
7439    case CONST_VECTOR:
7440    case SYMBOL_REF:
7441    case LABEL_REF:
7442      return;
7443
7444    case CLOBBER:
7445      /* If we are clobbering a MEM, mark any registers inside the address
7446         as being used.  */
7447      if (GET_CODE (XEXP (x, 0)) == MEM)
7448	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7449      return;
7450
7451    case SET:
7452      /* Unless we are setting a REG, count everything in SET_DEST.  */
7453      if (GET_CODE (SET_DEST (x)) != REG)
7454	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7455
7456      /* If SRC has side-effects, then we can't delete this insn, so the
7457	 usage of SET_DEST inside SRC counts.
7458
7459	 ??? Strictly-speaking, we might be preserving this insn
7460	 because some other SET has side-effects, but that's hard
7461	 to do and can't happen now.  */
7462      count_reg_usage (SET_SRC (x), counts,
7463		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7464		       incr);
7465      return;
7466
7467    case CALL_INSN:
7468      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7469      /* Fall through.  */
7470
7471    case INSN:
7472    case JUMP_INSN:
7473      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7474
7475      /* Things used in a REG_EQUAL note aren't dead since loop may try to
7476	 use them.  */
7477
7478      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7479      return;
7480
7481    case EXPR_LIST:
7482    case INSN_LIST:
7483      if (REG_NOTE_KIND (x) == REG_EQUAL
7484	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
7485	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7486      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7487      return;
7488
7489    default:
7490      break;
7491    }
7492
7493  fmt = GET_RTX_FORMAT (code);
7494  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7495    {
7496      if (fmt[i] == 'e')
7497	count_reg_usage (XEXP (x, i), counts, dest, incr);
7498      else if (fmt[i] == 'E')
7499	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7500	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7501    }
7502}
7503
7504/* Return true if set is live.  */
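/* For example (hypothetical), a no-op move such as
   (set (reg:SI 100) (reg:SI 100)) is never live, and
   (set (reg:SI 100) (reg:SI 101)) is dead when register 100 is a
   pseudo whose usage count is zero and the source has no side
   effects.  */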
7505static bool
7506set_live_p (set, insn, counts)
7507     rtx set;
7508     rtx insn ATTRIBUTE_UNUSED;	/* Only used with HAVE_cc0.  */
7509     int *counts;
7510{
7511#ifdef HAVE_cc0
7512  rtx tem;
7513#endif
7514
7515  if (set_noop_p (set))
7516    ;
7517
7518#ifdef HAVE_cc0
7519  else if (GET_CODE (SET_DEST (set)) == CC0
7520	   && !side_effects_p (SET_SRC (set))
7521	   && ((tem = next_nonnote_insn (insn)) == 0
7522	       || !INSN_P (tem)
7523	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7524    return false;
7525#endif
7526  else if (GET_CODE (SET_DEST (set)) != REG
7527	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7528	   || counts[REGNO (SET_DEST (set))] != 0
7529	   || side_effects_p (SET_SRC (set))
7530	   /* An ADDRESSOF expression can turn into a use of the
7531	      internal arg pointer, so always consider the
7532	      internal arg pointer live.  If it is truly dead,
7533	      flow will delete the initializing insn.  */
7534	   || (SET_DEST (set) == current_function_internal_arg_pointer))
7535    return true;
7536  return false;
7537}
7538
7539/* Return true if insn is live.  */
7540
7541static bool
7542insn_live_p (insn, counts)
7543     rtx insn;
7544     int *counts;
7545{
7546  int i;
7547  if (GET_CODE (PATTERN (insn)) == SET)
7548    return set_live_p (PATTERN (insn), insn, counts);
7549  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7550    {
7551      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7552	{
7553	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7554
7555	  if (GET_CODE (elt) == SET)
7556	    {
7557	      if (set_live_p (elt, insn, counts))
7558		return true;
7559	    }
7560	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7561	    return true;
7562	}
7563      return false;
7564    }
7565  else
7566    return true;
7567}
7568
7569/* Return true if libcall is dead as a whole.  */
7570
7571static bool
7572dead_libcall_p (insn)
7573     rtx insn;
7574{
7575  rtx note;
7576  /* See if there's a REG_EQUAL note on this insn and try to
7577     replace the source with the REG_EQUAL expression.
7578
7579     We assume that insns with REG_RETVALs can only be reg->reg
7580     copies at this point.  */
7581  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7582  if (note)
7583    {
7584      rtx set = single_set (insn);
7585      rtx new = simplify_rtx (XEXP (note, 0));
7586
7587      if (!new)
7588	new = XEXP (note, 0);
7589
7590      if (set && validate_change (insn, &SET_SRC (set), new, 0))
7591	{
7592	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7593	  return true;
7594	}
7595    }
7596  return false;
7597}
7598
7599/* Scan all the insns and delete any that are dead; i.e., they store a register
7600   that is never used or they copy a register to itself.
7601
7602   This is used to remove insns made obviously dead by cse, loop or other
7603   optimizations.  It improves the heuristics in loop since it won't try to
7604   move dead invariants out of loops or make givs for dead quantities.  The
7605   remaining passes of the compilation are also sped up.  */
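
/* For example (hypothetical), if (reg:SI 100) is never used, then

     (set (reg:SI 100) (reg:SI 101))

   stores a value that is never needed and is deleted; the usage count
   of register 101 is then decremented, which may let its own setter be
   deleted in turn.  */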
7606
7607void
7608delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
7609     rtx insns;
7610     int nreg;
7611     int preserve_basic_blocks;
7612{
7613  int *counts;
7614  rtx insn, prev;
7615  int i;
7616  int in_libcall = 0, dead_libcall = 0;
7617  basic_block bb;
7618
7619  /* First count the number of times each register is used.  */
7620  counts = (int *) xcalloc (nreg, sizeof (int));
7621  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7622    count_reg_usage (insn, counts, NULL_RTX, 1);
7623
7624  /* Go from the last insn to the first and delete insns that only set unused
7625     registers or copy a register to itself.  As we delete an insn, remove
7626     usage counts for registers it uses.
7627
7628     The first jump optimization pass may leave a real insn as the last
7629     insn in the function.  We must not skip that insn or we may end
7630     up deleting code that is not really dead.  */
7631  insn = get_last_insn ();
7632  if (! INSN_P (insn))
7633    insn = prev_real_insn (insn);
7634
7635  if (!preserve_basic_blocks)
7636    for (; insn; insn = prev)
7637      {
7638	int live_insn = 0;
7639
7640	prev = prev_real_insn (insn);
7641
7642	/* Don't delete any insns that are part of a libcall block unless
7643	   we can delete the whole libcall block.
7644
7645	   Flow or loop might get confused if we did that.  Remember
7646	   that we are scanning backwards.  */
7647	if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7648	  {
7649	    in_libcall = 1;
7650	    live_insn = 1;
7651	    dead_libcall = dead_libcall_p (insn);
7652	  }
7653	else if (in_libcall)
7654	  live_insn = ! dead_libcall;
7655	else
7656	  live_insn = insn_live_p (insn, counts);
7657
7658	/* If this is a dead insn, delete it and show registers in it aren't
7659	   being used.  */
7660
7661	if (! live_insn)
7662	  {
7663	    count_reg_usage (insn, counts, NULL_RTX, -1);
7664	    delete_related_insns (insn);
7665	  }
7666
7667	if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7668	  {
7669	    in_libcall = 0;
7670	    dead_libcall = 0;
7671	  }
7672      }
7673  else
7674    for (i = 0; i < n_basic_blocks; i++)
7675      for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
7676	{
7677	  int live_insn = 0;
7678
7679	  prev = PREV_INSN (insn);
7680	  if (!INSN_P (insn))
7681	    continue;
7682
7683	  /* Don't delete any insns that are part of a libcall block unless
7684	     we can delete the whole libcall block.
7685
7686	     Flow or loop might get confused if we did that.  Remember
7687	     that we are scanning backwards.  */
7688	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7689	    {
7690	      in_libcall = 1;
7691	      live_insn = 1;
7692	      dead_libcall = dead_libcall_p (insn);
7693	    }
7694	  else if (in_libcall)
7695	    live_insn = ! dead_libcall;
7696	  else
7697	    live_insn = insn_live_p (insn, counts);
7698
7699	  /* If this is a dead insn, delete it and show registers in it aren't
7700	     being used.  */
7701
7702	  if (! live_insn)
7703	    {
7704	      count_reg_usage (insn, counts, NULL_RTX, -1);
7705	      delete_insn (insn);
7706	    }
7707
7708	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7709	    {
7710	      in_libcall = 0;
7711	      dead_libcall = 0;
7712	    }
7713	}
7714
7715  /* Clean up.  */
7716  free (counts);
7717}
7718