cse.c revision 52284
1/* Common subexpression elimination for GNU compiler.
2   Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING.  If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA.  */
20
21
22#include "config.h"
23/* stdio.h must precede rtl.h for FFS.  */
24#include "system.h"
25#include <setjmp.h>
26
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "flags.h"
31#include "real.h"
32#include "insn-config.h"
33#include "recog.h"
34#include "expr.h"
35#include "toplev.h"
36#include "output.h"
37#include "splay-tree.h"
38
39/* The basic idea of common subexpression elimination is to go
40   through the code, keeping a record of expressions that would
41   have the same value at the current scan point, and replacing
42   expressions encountered with the cheapest equivalent expression.
43
44   It is too complicated to keep track of the different possibilities
45   when control paths merge; so, at each label, we forget all that is
46   known and start fresh.  This can be described as processing each
47   basic block separately.  Note, however, that these are not quite
48   the same as the basic blocks found by a later pass and used for
49   data flow analysis and register packing.  We do not need to start fresh
50   after a conditional jump instruction if there is no label there.
51
52   We use two data structures to record the equivalent expressions:
53   a hash table for most expressions, and several vectors together
54   with "quantity numbers" to record equivalent (pseudo) registers.
55
56   The use of the special data structure for registers is desirable
57   because it is faster.  It is possible because register references
58   contain a fairly small number, the register number, taken from
59   a contiguously allocated series, and two register references are
60   identical if they have the same number.  General expressions
61   do not have any such thing, so the only way to retrieve the
62   information recorded on an expression other than a register
63   is to keep it in a hash table.
64
65Registers and "quantity numbers":
66
67   At the start of each basic block, all of the (hardware and pseudo)
68   registers used in the function are given distinct quantity
69   numbers to indicate their contents.  During scan, when the code
70   copies one register into another, we copy the quantity number.
71   When a register is loaded in any other way, we allocate a new
72   quantity number to describe the value generated by this operation.
73   `reg_qty' records what quantity a register is currently thought
74   of as containing.
75
76   All real quantity numbers are greater than or equal to `max_reg'.
77   If register N has not been assigned a quantity, reg_qty[N] will equal N.
78
79   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
80   variables should be referenced with an index below `max_reg'.
81
82   We also maintain a bidirectional chain of registers for each
83   quantity number.  `qty_first_reg', `qty_last_reg',
84   `reg_next_eqv' and `reg_prev_eqv' hold these chains.
85
86   The first register in a chain is the one whose lifespan is least local.
87   Among equals, it is the one that was seen first.
88   We replace any equivalent register with that one.
89
90   If two registers have the same quantity number, it must be true that
91   REG expressions with `qty_mode' must be in the hash table for both
92   registers and must be in the same class.
93
94   The converse is not true.  Since hard registers may be referenced in
95   any mode, two REG expressions might be equivalent in the hash table
96   but not have the same quantity number if the quantity number of one
97   of the registers is not the same mode as those expressions.
98
99Constants and quantity numbers
100
101   When a quantity has a known constant value, that value is stored
102   in the appropriate element of qty_const.  This is in addition to
103   putting the constant in the hash table as is usual for non-regs.
104
105   Whether a reg or a constant is preferred is determined by the configuration
106   macro CONST_COSTS and will often depend on the constant value.  In any
107   event, expressions containing constants can be simplified, by fold_rtx.
108
109   When a quantity has a known nearly constant value (such as an address
110   of a stack slot), that value is stored in the appropriate element
111   of qty_const.
112
113   Integer constants don't have a machine mode.  However, cse
114   determines the intended machine mode from the destination
115   of the instruction that moves the constant.  The machine mode
116   is recorded in the hash table along with the actual RTL
117   constant expression so that different modes are kept separate.
118
119Other expressions:
120
121   To record known equivalences among expressions in general
122   we use a hash table called `table'.  It has a fixed number of buckets
123   that contain chains of `struct table_elt' elements for expressions.
124   These chains connect the elements whose expressions have the same
125   hash codes.
126
127   Other chains through the same elements connect the elements which
128   currently have equivalent values.
129
130   Register references in an expression are canonicalized before hashing
131   the expression.  This is done using `reg_qty' and `qty_first_reg'.
132   The hash code of a register reference is computed using the quantity
133   number, not the register number.
134
135   When the value of an expression changes, it is necessary to remove from the
136   hash table not just that expression but all expressions whose values
137   could be different as a result.
138
139     1. If the value changing is in memory, except in special cases
140     ANYTHING referring to memory could be changed.  That is because
141     nobody knows where a pointer does not point.
142     The function `invalidate_memory' removes what is necessary.
143
144     The special cases are when the address is constant or is
145     a constant plus a fixed register such as the frame pointer
146     or a static chain pointer.  When such addresses are stored in,
147     we can tell exactly which other such addresses must be invalidated
148     due to overlap.  `invalidate' does this.
149     All expressions that refer to non-constant
150     memory addresses are also invalidated.  `invalidate_memory' does this.
151
152     2. If the value changing is a register, all expressions
153     containing references to that register, and only those,
154     must be removed.
155
156   Because searching the entire hash table for expressions that contain
157   a register is very slow, we try to figure out when it isn't necessary.
158   Precisely, this is necessary only when expressions have been
159   entered in the hash table using this register, and then the value has
160   changed, and then another expression wants to be added to refer to
161   the register's new value.  This sequence of circumstances is rare
162   within any one basic block.
163
164   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
165   reg_tick[i] is incremented whenever a value is stored in register i.
166   reg_in_table[i] holds -1 if no references to register i have been
167   entered in the table; otherwise, it contains the value reg_tick[i] had
168   when the references were entered.  If we want to enter a reference
169   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
170   Until we want to enter a new entry, the mere fact that the two vectors
171   don't match makes the entries be ignored if anyone tries to match them.
172
173   Registers themselves are entered in the hash table as well as in
174   the equivalent-register chains.  However, the vectors `reg_tick'
175   and `reg_in_table' do not apply to expressions which are simple
176   register references.  These expressions are removed from the table
177   immediately when they become invalid, and this can be done even if
178   we do not immediately search for all the expressions that refer to
179   the register.
180
181   A CLOBBER rtx in an instruction invalidates its operand for further
182   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
183   invalidates everything that resides in memory.
184
185Related expressions:
186
187   Constant expressions that differ only by an additive integer
188   are called related.  When a constant expression is put in
189   the table, the related expression with no constant term
190   is also entered.  These are made to point at each other
191   so that it is possible to find out if there exists any
192   register equivalent to an expression related to a given expression.  */
193
194/* One plus largest register number used in this function.  */
195
196static int max_reg;
197
198/* One plus largest instruction UID used in this function at time of
199   cse_main call.  */
200
201static int max_insn_uid;
202
203/* Length of vectors indexed by quantity number.
204   We know in advance we will not need a quantity number this big.  */
205
206static int max_qty;
207
208/* Next quantity number to be allocated.
209   This is 1 + the largest number needed so far.  */
210
211static int next_qty;
212
213/* Indexed by quantity number, gives the first (or last) register
214   in the chain of registers that currently contain this quantity.  */
215
216static int *qty_first_reg;
217static int *qty_last_reg;
218
219/* Indexed by quantity number, gives the mode of the quantity.  */
220
221static enum machine_mode *qty_mode;
222
223/* Indexed by quantity number, gives the rtx of the constant value of the
224   quantity, or zero if it does not have a known value.
225   A sum of the frame pointer (or arg pointer) plus a constant
226   can also be entered here.  */
227
228static rtx *qty_const;
229
230/* Indexed by qty number, gives the insn that stored the constant value
231   recorded in `qty_const'.  */
232
233static rtx *qty_const_insn;
234
235/* The next three variables are used to track when a comparison between a
236   quantity and some constant or register has been passed.  In that case, we
237   know the results of the comparison in case we see it again.  These variables
238   record a comparison that is known to be true.  */
239
240/* Indexed by qty number, gives the rtx code of a comparison with a known
241   result involving this quantity.  If none, it is UNKNOWN.  */
242static enum rtx_code *qty_comparison_code;
243
244/* Indexed by qty number, gives the constant being compared against in a
245   comparison of known result.  If no such comparison, it is undefined.
246   If the comparison is not with a constant, it is zero.  */
247
248static rtx *qty_comparison_const;
249
250/* Indexed by qty number, gives the quantity being compared against in a
251   comparison of known result.  If no such comparison, it is undefined.
252   If the comparison is not with a register, it is -1.  */
253
254static int *qty_comparison_qty;
255
256#ifdef HAVE_cc0
257/* For machines that have a CC0, we do not record its value in the hash
258   table since its use is guaranteed to be the insn immediately following
259   its definition and any other insn is presumed to invalidate it.
260
261   Instead, we store below the value last assigned to CC0.  If it should
262   happen to be a constant, it is stored in preference to the actual
263   assigned value.  In case it is a constant, we store the mode in which
264   the constant should be interpreted.  */
265
266static rtx prev_insn_cc0;
267static enum machine_mode prev_insn_cc0_mode;
268#endif
269
270/* Previous actual insn.  0 if at first insn of basic block.  */
271
272static rtx prev_insn;
273
274/* Insn being scanned.  */
275
276static rtx this_insn;
277
278/* Indexed by register number, gives the number of the next (or
279   previous) register in the chain of registers sharing the same
280   value.
281
282   Or -1 if this register is at the end of the chain.
283
284   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */
285
286static int *reg_next_eqv;
287static int *reg_prev_eqv;
288
/* Per-register CSE bookkeeping.  Entries are allocated from a free
   list (`cse_reg_info_free_list') and are looked up by register number
   through the splay tree `cse_reg_info_tree'; see GET_CSE_REG_INFO.  */
struct cse_reg_info {
  union {
    /* The number of times the register has been altered in the current
       basic block.  */
    int reg_tick;

    /* The next cse_reg_info structure in the free list.  */
    struct cse_reg_info* next;
  } variant;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The quantity number of the register's current contents.  */
  int reg_qty;
};
308
309/* A free list of cse_reg_info entries.  */
310static struct cse_reg_info *cse_reg_info_free_list;
311
312/* A mapping from registers to cse_reg_info data structures.  */
313static splay_tree cse_reg_info_tree;
314
315/* The last lookup we did into the cse_reg_info_tree.  This allows us
316   to cache repeated lookups.  */
317static int cached_regno;
318static struct cse_reg_info *cached_cse_reg_info;
319
320/* A HARD_REG_SET containing all the hard registers for which there is
321   currently a REG expression in the hash table.  Note the difference
322   from the above variables, which indicate if the REG is mentioned in some
323   expression in the table.  */
324
325static HARD_REG_SET hard_regs_in_table;
326
327/* A HARD_REG_SET containing all the hard registers that are invalidated
328   by a CALL_INSN.  */
329
330static HARD_REG_SET regs_invalidated_by_call;
331
332/* CUID of insn that starts the basic block currently being cse-processed.  */
333
334static int cse_basic_block_start;
335
336/* CUID of insn that ends the basic block currently being cse-processed.  */
337
338static int cse_basic_block_end;
339
340/* Vector mapping INSN_UIDs to cuids.
341   The cuids are like uids but increase monotonically always.
342   We use them to see whether a reg is used outside a given basic block.  */
343
344static int *uid_cuid;
345
346/* Highest UID in UID_CUID.  */
347static int max_uid;
348
349/* Get the cuid of an insn.  */
350
351#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
352
353/* Nonzero if cse has altered conditional jump insns
354   in such a way that jump optimization should be redone.  */
355
356static int cse_jumps_altered;
357
358/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
359   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
360   to put in the note.  */
361static int recorded_label_ref;
362
363/* canon_hash stores 1 in do_not_record
364   if it notices a reference to CC0, PC, or some other volatile
365   subexpression.  */
366
367static int do_not_record;
368
369#ifdef LOAD_EXTEND_OP
370
371/* Scratch rtl used when looking for load-extended copy of a MEM.  */
372static rtx memory_extend_rtx;
373#endif
374
375/* canon_hash stores 1 in hash_arg_in_memory
376   if it notices a reference to memory within the expression being hashed.  */
377
378static int hash_arg_in_memory;
379
380/* canon_hash stores 1 in hash_arg_in_struct
381   if it notices a reference to memory that's part of a structure.  */
382
383static int hash_arg_in_struct;
384
385/* The hash table contains buckets which are chains of `struct table_elt's,
386   each recording one expression's information.
387   That expression is in the `exp' field.
388
389   Those elements with the same hash code are chained in both directions
390   through the `next_same_hash' and `prev_same_hash' fields.
391
392   Each set of expressions with equivalent values
393   are on a two-way chain through the `next_same_value'
394   and `prev_same_value' fields, and all point with
395   the `first_same_value' field at the first element in
396   that chain.  The chain is in order of increasing cost.
397   Each element's cost value is in its `cost' field.
398
399   The `in_memory' field is nonzero for elements that
400   involve any reference to memory.  These elements are removed
401   whenever a write is done to an unidentified location in memory.
402   To be safe, we assume that a memory address is unidentified unless
403   the address is either a symbol constant or a constant plus
404   the frame pointer or argument pointer.
405
406   The `in_struct' field is nonzero for elements that
407   involve any reference to memory inside a structure or array.
408
409   The `related_value' field is used to connect related expressions
410   (that differ by adding an integer).
411   The related expressions are chained in a circular fashion.
412   `related_value' is zero for expressions for which this
413   chain is not useful.
414
415   The `cost' field stores the cost of this element's expression.
416
417   The `is_const' flag is set if the element is a constant (including
418   a fixed address).
419
420   The `flag' field is used as a temporary during some search routines.
421
422   The `mode' field is usually the same as GET_MODE (`exp'), but
423   if `exp' is a CONST_INT and has no machine mode then the `mode'
424   field is the mode it was being used as.  Each constant is
425   recorded separately for each mode it is used with.  */
426
427
/* One hash-table element; see the long comment above for the full
   description of each field's role.  */
struct table_elt
{
  /* The expression this element records.  */
  rtx exp;
  /* Two-way chain of elements with the same hash code.  */
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  /* Two-way chain of elements with equivalent values, kept in order
     of increasing cost.  */
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  /* First (cheapest) element in this same-value chain.  */
  struct table_elt *first_same_value;
  /* Circular chain of expressions that differ by an additive integer;
     zero when that chain is not useful.  */
  struct table_elt *related_value;
  /* Cost of `exp', as stored by the COST macro.  */
  int cost;
  /* Usually GET_MODE (`exp'); for a CONST_INT with no machine mode,
     the mode it was being used as.  */
  enum machine_mode mode;
  /* Nonzero if `exp' involves any reference to memory.  */
  char in_memory;
  /* Nonzero if `exp' references memory inside a structure or array.  */
  char in_struct;
  /* Nonzero if `exp' is a constant (including a fixed address).  */
  char is_const;
  /* Scratch flag used as a temporary during some search routines.  */
  char flag;
};
444
445/* We don't want a lot of buckets, because we rarely have very many
446   things stored in the hash table, and a lot of buckets slows
447   down a lot of loops that happen frequently.  */
448#define NBUCKETS 31
449
450/* Compute hash code of X in mode M.  Special-case case where X is a pseudo
451   register (hard registers may require `do_not_record' to be set).  */
452
453#define HASH(X, M)	\
454 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
455  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS	\
456  : canon_hash (X, M) % NBUCKETS)
457
458/* Determine whether register number N is considered a fixed register for CSE.
459   It is desirable to replace other regs with fixed regs, to reduce need for
460   non-fixed hard regs.
461   A reg wins if it is either the frame pointer or designated as fixed,
462   but not if it is an overlapping register.  */
463#ifdef OVERLAPPING_REGNO_P
464#define FIXED_REGNO_P(N)  \
465  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
466    || fixed_regs[N] || global_regs[N])	  \
467   && ! OVERLAPPING_REGNO_P ((N)))
468#else
469#define FIXED_REGNO_P(N)  \
470  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
471   || fixed_regs[N] || global_regs[N])
472#endif
473
474/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
475   hard registers and pointers into the frame are the cheapest with a cost
476   of 0.  Next come pseudos with a cost of one and other hard registers with
477   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
478
479#define CHEAP_REGNO(N) \
480  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
481   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
482   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
483   || ((N) < FIRST_PSEUDO_REGISTER					\
484       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
485
486/* A register is cheap if it is a user variable assigned to the register
487   or if its register number always corresponds to a cheap register.  */
488
489#define CHEAP_REG(N) \
490  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
491   || CHEAP_REGNO (REGNO (N)))
492
493#define COST(X)								\
494  (GET_CODE (X) == REG							\
495   ? (CHEAP_REG (X) ? 0							\
496      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1				\
497      : 2)								\
498   : notreg_cost(X))
499
500/* Get the info associated with register N.  */
501
502#define GET_CSE_REG_INFO(N) 			\
503  (((N) == cached_regno && cached_cse_reg_info)	\
504   ? cached_cse_reg_info : get_cse_reg_info ((N)))
505
506/* Get the number of times this register has been updated in this
507   basic block.  */
508
509#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->variant.reg_tick)
510
511/* Get the point at which REG was recorded in the table.  */
512
513#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
514
515/* Get the quantity number for REG.  */
516
517#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
518
519/* Determine if the quantity number for register X represents a valid index
520   into the `qty_...' variables.  */
521
522#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))
523
524#ifdef ADDRESS_COST
525/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
526   during CSE, such nodes are present.  Using an ADDRESSOF node which
527   refers to the address of a REG is a good thing because we can then
528   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
529#define CSE_ADDRESS_COST(RTX)					\
530  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0)))	\
531   ? -1 : ADDRESS_COST(RTX))
532#endif
533
534static struct table_elt *table[NBUCKETS];
535
536/* Chain of `struct table_elt's made so far for this function
537   but currently removed from the table.  */
538
539static struct table_elt *free_element_chain;
540
541/* Number of `struct table_elt' structures made so far for this function.  */
542
543static int n_elements_made;
544
545/* Maximum value `n_elements_made' has had so far in this compilation
546   for functions previously processed.  */
547
548static int max_elements_made;
549
550/* Surviving equivalence class when two equivalence classes are merged
551   by recording the effects of a jump in the last insn.  Zero if the
552   last insn was not a conditional jump.  */
553
554static struct table_elt *last_jump_equiv_class;
555
556/* Set to the cost of a constant pool reference if one was found for a
557   symbolic constant.  If this was found, it means we should try to
558   convert constants into constant pool entries if they don't fit in
559   the insn.  */
560
561static int constant_pool_entries_cost;
562
563/* Define maximum length of a branch path.  */
564
565#define PATHLENGTH	10
566
567/* This data describes a block that will be processed by cse_basic_block.  */
568
struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.
     At most PATHLENGTH branches are recorded.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};
590
591/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
592   virtual regs here because the simplify_*_operation routines are called
593   by integrate.c, which is called before virtual register instantiation.  */
594
595#define FIXED_BASE_PLUS_P(X)					\
596  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
597   || (X) == arg_pointer_rtx					\
598   || (X) == virtual_stack_vars_rtx				\
599   || (X) == virtual_incoming_args_rtx				\
600   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
601       && (XEXP (X, 0) == frame_pointer_rtx			\
602	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
603	   || XEXP (X, 0) == arg_pointer_rtx			\
604	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
605	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
606   || GET_CODE (X) == ADDRESSOF)
607
608/* Similar, but also allows reference to the stack pointer.
609
610   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
611   arg_pointer_rtx by itself is nonzero, because on at least one machine,
612   the i960, the arg pointer is zero when it is unused.  */
613
614#define NONZERO_BASE_PLUS_P(X)					\
615  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
616   || (X) == virtual_stack_vars_rtx				\
617   || (X) == virtual_incoming_args_rtx				\
618   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
619       && (XEXP (X, 0) == frame_pointer_rtx			\
620	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
621	   || XEXP (X, 0) == arg_pointer_rtx			\
622	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
623	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
624   || (X) == stack_pointer_rtx					\
625   || (X) == virtual_stack_dynamic_rtx				\
626   || (X) == virtual_outgoing_args_rtx				\
627   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
628       && (XEXP (X, 0) == stack_pointer_rtx			\
629	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
630	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
631   || GET_CODE (X) == ADDRESSOF)
632
633static int notreg_cost		PROTO((rtx));
634static void new_basic_block	PROTO((void));
635static void make_new_qty	PROTO((int));
636static void make_regs_eqv	PROTO((int, int));
637static void delete_reg_equiv	PROTO((int));
638static int mention_regs		PROTO((rtx));
639static int insert_regs		PROTO((rtx, struct table_elt *, int));
640static void free_element	PROTO((struct table_elt *));
641static void remove_from_table	PROTO((struct table_elt *, unsigned));
642static struct table_elt *get_element PROTO((void));
643static struct table_elt *lookup	PROTO((rtx, unsigned, enum machine_mode)),
644       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
645static rtx lookup_as_function	PROTO((rtx, enum rtx_code));
646static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
647				       enum machine_mode));
648static void merge_equiv_classes PROTO((struct table_elt *,
649				       struct table_elt *));
650static void invalidate		PROTO((rtx, enum machine_mode));
651static int cse_rtx_varies_p	PROTO((rtx));
652static void remove_invalid_refs	PROTO((int));
653static void remove_invalid_subreg_refs	PROTO((int, int, enum machine_mode));
654static void rehash_using_reg	PROTO((rtx));
655static void invalidate_memory	PROTO((void));
656static void invalidate_for_call	PROTO((void));
657static rtx use_related_value	PROTO((rtx, struct table_elt *));
658static unsigned canon_hash	PROTO((rtx, enum machine_mode));
659static unsigned safe_hash	PROTO((rtx, enum machine_mode));
660static int exp_equiv_p		PROTO((rtx, rtx, int, int));
661static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
662						     HOST_WIDE_INT *,
663						     HOST_WIDE_INT *));
664static int refers_to_p		PROTO((rtx, rtx));
665static rtx canon_reg		PROTO((rtx, rtx));
666static void find_best_addr	PROTO((rtx, rtx *));
667static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
668						 enum machine_mode *,
669						 enum machine_mode *));
670static rtx cse_gen_binary	PROTO((enum rtx_code, enum machine_mode,
671				       rtx, rtx));
672static rtx simplify_plus_minus	PROTO((enum rtx_code, enum machine_mode,
673				       rtx, rtx));
674static rtx fold_rtx		PROTO((rtx, rtx));
675static rtx equiv_constant	PROTO((rtx));
676static void record_jump_equiv	PROTO((rtx, int));
677static void record_jump_cond	PROTO((enum rtx_code, enum machine_mode,
678				       rtx, rtx, int));
679static void cse_insn		PROTO((rtx, rtx));
680static int note_mem_written	PROTO((rtx));
681static void invalidate_from_clobbers PROTO((rtx));
682static rtx cse_process_notes	PROTO((rtx, rtx));
683static void cse_around_loop	PROTO((rtx));
684static void invalidate_skipped_set PROTO((rtx, rtx));
685static void invalidate_skipped_block PROTO((rtx));
686static void cse_check_loop_start PROTO((rtx, rtx));
687static void cse_set_around_loop	PROTO((rtx, rtx, rtx));
688static rtx cse_basic_block	PROTO((rtx, rtx, struct branch_path *, int));
689static void count_reg_usage	PROTO((rtx, int *, rtx, int));
690extern void dump_class          PROTO((struct table_elt*));
691static void check_fold_consts	PROTO((PTR));
692static struct cse_reg_info* get_cse_reg_info PROTO((int));
693static void free_cse_reg_info   PROTO((splay_tree_value));
694static void flush_hash_table	PROTO((void));
695
696extern int rtx_equal_function_value_matters;
697
698/* Dump the expressions in the equivalence class indicated by CLASSP.
699   This function is used only for debugging.  */
700void
701dump_class (classp)
702     struct table_elt *classp;
703{
704  struct table_elt *elt;
705
706  fprintf (stderr, "Equivalence chain for ");
707  print_rtl (stderr, classp->exp);
708  fprintf (stderr, ": \n");
709
710  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
711    {
712      print_rtl (stderr, elt->exp);
713      fprintf (stderr, "\n");
714    }
715}
716
717/* Return an estimate of the cost of computing rtx X.
718   One use is in cse, to decide which expression to keep in the hash table.
719   Another is in rtl generation, to pick the cheapest way to multiply.
720   Other uses like the latter are expected in the future.  */
721
722/* Internal function, to compute cost when X is not a register; called
723   from COST macro to keep it simple.  */
724
725static int
726notreg_cost (x)
727     rtx x;
728{
729  return ((GET_CODE (x) == SUBREG
730	   && GET_CODE (SUBREG_REG (x)) == REG
731	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
732	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
733	   && (GET_MODE_SIZE (GET_MODE (x))
734	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
735	   && subreg_lowpart_p (x)
736	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
737				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
738	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
739	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
740		: 2))
741	  : rtx_cost (x, SET) * 2);
742}
743
744/* Return the right cost to give to an operation
745   to make the cost of the corresponding register-to-register instruction
746   N times that of a fast register-to-register instruction.  */
747
748#define COSTS_N_INSNS(N) ((N) * 4 - 2)
749
/* Return an estimate of the cost of computing X.  OUTER_CODE is the
   code of the expression in which X appears; it is used only by the
   target cost macros below.  */
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  /* A null rtx costs nothing.  */
  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
      /* NOTE: the target macros below expand into `case' labels with
	 their own `return' statements, so although they follow a
	 `return' they are reached via the switch dispatch, not by
	 fall-through.  Do not move or "clean up" this placement.  */
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS(x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
837
838static struct cse_reg_info *
839get_cse_reg_info (regno)
840     int regno;
841{
842  struct cse_reg_info *cri;
843  splay_tree_node n;
844
845  /* See if we already have this entry.  */
846  n = splay_tree_lookup (cse_reg_info_tree,
847			(splay_tree_key) regno);
848  if (n)
849    cri = (struct cse_reg_info *) (n->value);
850  else
851    {
852      /* Get a new cse_reg_info structure.  */
853      if (cse_reg_info_free_list)
854	{
855	  cri = cse_reg_info_free_list;
856	  cse_reg_info_free_list = cri->variant.next;
857	}
858      else
859	cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
860
861      /* Initialize it.  */
862      cri->variant.reg_tick = 0;
863      cri->reg_in_table = -1;
864      cri->reg_qty = regno;
865
866      splay_tree_insert (cse_reg_info_tree,
867			 (splay_tree_key) regno,
868			 (splay_tree_value) cri);
869    }
870
871  /* Cache this lookup; we tend to be looking up information about the
872     same register several times in a row.  */
873  cached_regno = regno;
874  cached_cse_reg_info = cri;
875
876  return cri;
877}
878
879static void
880free_cse_reg_info (v)
881     splay_tree_value v;
882{
883  struct cse_reg_info *cri = (struct cse_reg_info *) v;
884
885  cri->variant.next = cse_reg_info_free_list;
886  cse_reg_info_free_list = cri;
887}
888
889/* Clear the hash table and initialize each register with its own quantity,
890   for a new basic block.  */
891
892static void
893new_basic_block ()
894{
895  register int i;
896
897  next_qty = max_reg;
898
899  if (cse_reg_info_tree)
900    {
901      splay_tree_delete (cse_reg_info_tree);
902      cached_cse_reg_info = 0;
903    }
904
905  cse_reg_info_tree = splay_tree_new (splay_tree_compare_ints, 0,
906				      free_cse_reg_info);
907
908  CLEAR_HARD_REG_SET (hard_regs_in_table);
909
910  /* The per-quantity values used to be initialized here, but it is
911     much faster to initialize each as it is made in `make_new_qty'.  */
912
913  for (i = 0; i < NBUCKETS; i++)
914    {
915      register struct table_elt *this, *next;
916      for (this = table[i]; this; this = next)
917	{
918	  next = this->next_same_hash;
919	  free_element (this);
920	}
921    }
922
923  bzero ((char *) table, sizeof table);
924
925  prev_insn = 0;
926
927#ifdef HAVE_cc0
928  prev_insn_cc0 = 0;
929#endif
930}
931
932/* Say that register REG contains a quantity not in any register before
933   and initialize that quantity.  */
934
935static void
936make_new_qty (reg)
937     register int reg;
938{
939  register int q;
940
941  if (next_qty >= max_qty)
942    abort ();
943
944  q = REG_QTY (reg) = next_qty++;
945  qty_first_reg[q] = reg;
946  qty_last_reg[q] = reg;
947  qty_const[q] = qty_const_insn[q] = 0;
948  qty_comparison_code[q] = UNKNOWN;
949
950  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
951}
952
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  NEW joins OLD's quantity's equivalence
   chain, either at the head (becoming the canonical replacement) or
   spliced in further back, depending on which register makes the
   better canonical element.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      /* NEW becomes the canonical (head) element: link it in front of
	 the old head FIRSTR.  */
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      /* Splice NEW into the chain immediately after LASTR, updating
	 qty_last_reg if LASTR was the tail.  */
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
1014
1015/* Remove REG from its equivalence class.  */
1016
1017static void
1018delete_reg_equiv (reg)
1019     register int reg;
1020{
1021  register int q = REG_QTY (reg);
1022  register int p, n;
1023
1024  /* If invalid, do nothing.  */
1025  if (q == reg)
1026    return;
1027
1028  p = reg_prev_eqv[reg];
1029  n = reg_next_eqv[reg];
1030
1031  if (n != -1)
1032    reg_prev_eqv[n] = p;
1033  else
1034    qty_last_reg[q] = p;
1035  if (p != -1)
1036    reg_next_eqv[p] = n;
1037  else
1038    qty_first_reg[q] = n;
1039
1040  REG_QTY (reg) = reg;
1041}
1042
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  /* REG_IN_TABLE lagging REG_TICK means stale entries mention
	     this register; purge them before marking it current.  */
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse over all sub-expressions and vectors of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1150
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[REG_QTY (regno)] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  /* Prefer joining the class of an existing REG of the same
	     mode; failing that, start a fresh quantity for REGNO.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[REG_QTY (regno)] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1226
1227/* Look in or update the hash table.  */
1228
1229/* Put the element ELT on the list of free elements.  */
1230
1231static void
1232free_element (elt)
1233     struct table_elt *elt;
1234{
1235  elt->next_same_hash = free_element_chain;
1236  free_element_chain = elt;
1237}
1238
1239/* Return an element that is free for use.  */
1240
1241static struct table_elt *
1242get_element ()
1243{
1244  struct table_elt *elt = free_element_chain;
1245  if (elt)
1246    {
1247      free_element_chain = elt->next_same_hash;
1248      return elt;
1249    }
1250  n_elements_made++;
1251  return (struct table_elt *) oballoc (sizeof (struct table_elt));
1252}
1253
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  ELT is unlinked from
   its value class, its hash bucket, and its related-value chain,
   then returned to the free-element list.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT was the class head; every surviving member must point at
	   the new head.  */
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      /* Walk around the circular chain to find ELT's predecessor.  */
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}
1329
1330/* Look up X in the hash table and return its table element,
1331   or 0 if X is not in the table.
1332
1333   MODE is the machine-mode of X, or if X is an integer constant
1334   with VOIDmode then MODE is the mode with which X will be used.
1335
1336   Here we are satisfied to find an expression whose tree structure
1337   looks like X.  */
1338
1339static struct table_elt *
1340lookup (x, hash, mode)
1341     rtx x;
1342     unsigned hash;
1343     enum machine_mode mode;
1344{
1345  register struct table_elt *p;
1346
1347  for (p = table[hash]; p; p = p->next_same_hash)
1348    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1349			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1350      return p;
1351
1352  return 0;
1353}
1354
1355/* Like `lookup' but don't care whether the table element uses invalid regs.
1356   Also ignore discrepancies in the machine mode of a register.  */
1357
1358static struct table_elt *
1359lookup_for_remove (x, hash, mode)
1360     rtx x;
1361     unsigned hash;
1362     enum machine_mode mode;
1363{
1364  register struct table_elt *p;
1365
1366  if (GET_CODE (x) == REG)
1367    {
1368      int regno = REGNO (x);
1369      /* Don't check the machine mode when comparing registers;
1370	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1371      for (p = table[hash]; p; p = p->next_same_hash)
1372	if (GET_CODE (p->exp) == REG
1373	    && REGNO (p->exp) == regno)
1374	  return p;
1375    }
1376  else
1377    {
1378      for (p = table[hash]; p; p = p->next_same_hash)
1379	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1380	  return p;
1381    }
1382
1383  return 0;
1384}
1385
1386/* Look for an expression equivalent to X and with code CODE.
1387   If one is found, return that expression.  */
1388
1389static rtx
1390lookup_as_function (x, code)
1391     rtx x;
1392     enum rtx_code code;
1393{
1394  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1395					 GET_MODE (x));
1396  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1397     long as we are narrowing.  So if we looked in vain for a mode narrower
1398     than word_mode before, look for word_mode now.  */
1399  if (p == 0 && code == CONST_INT
1400      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1401    {
1402      x = copy_rtx (x);
1403      PUT_MODE (x, word_mode);
1404      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
1405    }
1406
1407  if (p == 0)
1408    return 0;
1409
1410  for (p = p->first_same_value; p; p = p->next_same_value)
1411    {
1412      if (GET_CODE (p->exp) == code
1413	  /* Make sure this is a valid entry in the table.  */
1414	  && exp_equiv_p (p->exp, p->exp, 1, 0))
1415	return p->exp;
1416    }
1417
1418  return 0;
1419}
1420
1421/* Insert X in the hash table, assuming HASH is its hash code
1422   and CLASSP is an element of the class it should go in
1423   (or 0 if a new class should be made).
1424   It is inserted at the proper position to keep the class in
1425   the order cheapest first.
1426
1427   MODE is the machine-mode of X, or if X is an integer constant
1428   with VOIDmode then MODE is the mode with which X will be used.
1429
1430   For elements of equal cheapness, the most recent one
1431   goes in front, except that the first element in the list
1432   remains first unless a cheaper element is added.  The order of
1433   pseudo-registers does not matter, as canon_reg will be called to
1434   find the cheapest when a register is retrieved from the table.
1435
1436   The in_memory field in the hash table element is set to 0.
1437   The caller must set it nonzero if appropriate.
1438
1439   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1440   and if insert_regs returns a nonzero value
1441   you must then recompute its hash code before calling here.
1442
1443   If necessary, update table showing constant values of quantities.  */
1444
/* Nonzero if element X is strictly cheaper than element Y.  */
#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	    SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  /* Link the new element at the head of its hash chain.  */
  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* All existing members must point at the new head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[REG_QTY (REGNO (classp->exp))]
	= gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
      qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      /* Search the class for an existing constant equivalence.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[REG_QTY (REGNO (x))]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
	   && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
    qty_const_insn[REG_QTY (REGNO (x))] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1612
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  /* NEXT is saved before each iteration's work because re-insertion
     unlinks ELT from CLASS2's chain.  */
  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  /* If assigning quantity numbers changed any hash codes,
	     recompute EXP's hash before re-inserting it.  */
	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
1669
1670
1671/* Flush the entire hash table.  */
1672
1673static void
1674flush_hash_table ()
1675{
1676  int i;
1677  struct table_elt *p;
1678
1679  for (i = 0; i < NBUCKETS; i++)
1680    for (p = table[i]; p; p = table[i])
1681      {
1682	/* Note that invalidate can remove elements
1683	   after P in the current hash chain.  */
1684	if (GET_CODE (p->exp) == REG)
1685	  invalidate (p->exp, p->mode);
1686	else
1687	  remove_from_table (p, i);
1688      }
1689}
1690
1691
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      REG_TICK (regno)++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Because a register can be referenced in more than one mode,
	     we might have to remove more than one table entry.  */

	  struct table_elt *elt;

	  while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
	    remove_from_table (elt, hash);
	}
      else
	{
	  /* IN_TABLE accumulates whether any covered hard register is
	     (or was) present in the hash table.  */
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  /* Invalidate every additional hard register covered by X.  */
	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      REG_TICK (i)++;
	    }

	  /* If any covered register had table entries, scan the whole
	     table for overlapping hard-register entries and drop them.  */
	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      /* Invalidating a SUBREG invalidates the whole inner register.  */
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* If X is a parallel, invalidate all of its elements.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;
    }

  /* If X is an expr_list, this is part of a disjoint return value;
     extract the location in question ignoring the offset.  */

  if (GET_CODE (x) == EXPR_LIST)
    {
      invalidate (XEXP (x, 0), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  /* Invalidate ASM_OPERANDS which reference memory (this is easier
	     than checking all the aliases).  */
	  if (p->in_memory
	      && (GET_CODE (p->exp) != MEM
		  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
	    remove_from_table (p, i);
	}
    }
}
1837
1838/* Remove all expressions that refer to register REGNO,
1839   since they are already invalid, and we are about to
1840   mark that register valid again and don't want the old
1841   expressions to reappear as valid.  */
1842
1843static void
1844remove_invalid_refs (regno)
1845     int regno;
1846{
1847  register int i;
1848  register struct table_elt *p, *next;
1849
1850  for (i = 0; i < NBUCKETS; i++)
1851    for (p = table[i]; p; p = next)
1852      {
1853	next = p->next_same_hash;
1854	if (GET_CODE (p->exp) != REG
1855	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1856	  remove_from_table (p, i);
1857      }
1858}
1859
/* Likewise, but only for entries that could overlap a SUBREG of
   register REGNO whose SUBREG_WORD is WORD and whose mode is MODE:
   only that part of the register is being invalidated.  */
static void
remove_invalid_subreg_refs (regno, word, mode)
     int regno;
     int word;
     enum machine_mode mode;
{
  register int i;
  register struct table_elt *p, *next;
  /* Last word of REGNO covered by a MODE-sized access starting at WORD.  */
  int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp;
	next = p->next_same_hash;

	exp = p->exp;
	/* Remove P unless it is a plain REG (handled through the quantity
	   mechanism) or a SUBREG of REGNO that does NOT overlap words
	   WORD..END; any other expression mentioning REGNO must go.  */
	if (GET_CODE (p->exp) != REG
	    && (GET_CODE (exp) != SUBREG
		|| GET_CODE (SUBREG_REG (exp)) != REG
		|| REGNO (SUBREG_REG (exp)) != regno
		|| (((SUBREG_WORD (exp)
		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
		     >= word)
		 && SUBREG_WORD (exp) <= end))
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}
1890
1891/* Recompute the hash codes of any valid entries in the hash table that
1892   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1893
1894   This is called when we make a jump equivalence.  */
1895
1896static void
1897rehash_using_reg (x)
1898     rtx x;
1899{
1900  unsigned int i;
1901  struct table_elt *p, *next;
1902  unsigned hash;
1903
1904  if (GET_CODE (x) == SUBREG)
1905    x = SUBREG_REG (x);
1906
1907  /* If X is not a register or if the register is known not to be in any
1908     valid entries in the table, we have no work to do.  */
1909
1910  if (GET_CODE (x) != REG
1911      || REG_IN_TABLE (REGNO (x)) < 0
1912      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1913    return;
1914
1915  /* Scan all hash chains looking for valid entries that mention X.
1916     If we find one and it is in the wrong hash chain, move it.  We can skip
1917     objects that are registers, since they are handled specially.  */
1918
1919  for (i = 0; i < NBUCKETS; i++)
1920    for (p = table[i]; p; p = next)
1921      {
1922	next = p->next_same_hash;
1923	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1924	    && exp_equiv_p (p->exp, p->exp, 1, 0)
1925	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1926	  {
1927	    if (p->next_same_hash)
1928	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
1929
1930	    if (p->prev_same_hash)
1931	      p->prev_same_hash->next_same_hash = p->next_same_hash;
1932	    else
1933	      table[i] = p->next_same_hash;
1934
1935	    p->next_same_hash = table[hash];
1936	    p->prev_same_hash = 0;
1937	    if (table[hash])
1938	      table[hash]->prev_same_hash = p;
1939	    table[hash] = p;
1940	  }
1941      }
1942}
1943
1944/* Remove from the hash table any expression that is a call-clobbered
1945   register.  Also update their TICK values.  */
1946
1947static void
1948invalidate_for_call ()
1949{
1950  int regno, endregno;
1951  int i;
1952  unsigned hash;
1953  struct table_elt *p, *next;
1954  int in_table = 0;
1955
1956  /* Go through all the hard registers.  For each that is clobbered in
1957     a CALL_INSN, remove the register from quantity chains and update
1958     reg_tick if defined.  Also see if any of these registers is currently
1959     in the table.  */
1960
1961  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1962    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1963      {
1964	delete_reg_equiv (regno);
1965	if (REG_TICK (regno) >= 0)
1966	  REG_TICK (regno)++;
1967
1968	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1969      }
1970
1971  /* In the case where we have no call-clobbered hard registers in the
1972     table, we are done.  Otherwise, scan the table and remove any
1973     entry that overlaps a call-clobbered register.  */
1974
1975  if (in_table)
1976    for (hash = 0; hash < NBUCKETS; hash++)
1977      for (p = table[hash]; p; p = next)
1978	{
1979	  next = p->next_same_hash;
1980
1981	  if (p->in_memory)
1982	    {
1983	      remove_from_table (p, hash);
1984	      continue;
1985	    }
1986
1987	  if (GET_CODE (p->exp) != REG
1988	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1989	    continue;
1990
1991	  regno = REGNO (p->exp);
1992	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1993
1994	  for (i = regno; i < endregno; i++)
1995	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1996	      {
1997		remove_from_table (p, hash);
1998		break;
1999	      }
2000	}
2001}
2002
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (x, elt)
     rtx x;
     struct table_elt *elt;
{
  register struct table_elt *relt = 0;
  register struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (GET_CODE (q->exp) == REG)
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  /* No class in the related-value ring contains a register.  */
  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
2077
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in do_not_record if any subexpression is volatile.

   Store 1 in hash_arg_in_memory if X contains a MEM rtx
   which does not have the RTX_UNCHANGING_P bit set.
   In this case, also store 1 in hash_arg_in_struct
   if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

static unsigned
canon_hash (x, mode)
     rtx x;
     enum machine_mode mode;
{
  register int i, j;
  register unsigned hash = 0;
  register enum rtx_code code;
  register char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	register int regno = REGNO (x);

	/* On some machines, we can't record any non-fixed hard register,
	   because extending its life will cause reload problems.  We
	   consider ap, fp, and sp to be fixed for this purpose.

	   We also consider CCmode registers to be fixed for this purpose;
	   failure to do so leads to failure to simplify 0<100 type of
	   conditionals.

	   On all machines, we can't record any global registers.  */

	if (regno < FIRST_PSEUDO_REGISTER
	    && (global_regs[regno]
		|| (SMALL_REGISTER_CLASSES
		    && ! fixed_regs[regno]
		    && regno != FRAME_POINTER_REGNUM
		    && regno != HARD_FRAME_POINTER_REGNUM
		    && regno != ARG_POINTER_REGNUM
		    && regno != STACK_POINTER_REGNUM
		    && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
	  {
	    do_not_record = 1;
	    return 0;
	  }
	/* Hash on the register's quantity number, not its number, so that
	   equivalent registers hash identically.  */
	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
	return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
	if (GET_CODE (SUBREG_REG (x)) == REG)
	  {
	    hash += (((unsigned) SUBREG << 7)
		     + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
	    return hash;
	  }
	break;
      }

    case CONST_INT:
      {
	unsigned HOST_WIDE_INT tem = INTVAL (x);
	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
	return hash;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  {
	    unsigned tem = XINT (x, i);
	    hash += tem;
	  }
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      hash
	+= ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
      return hash;

    case SYMBOL_REF:
      hash
	+= ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
      return hash;

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
      if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
	{
	  hash_arg_in_memory = 1;
	  if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
	}
      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

      /* These codes mark expressions that are unsafe to record.  */
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      do_not_record = 1;
      return 0;

    case ASM_OPERANDS:
      /* A volatile asm must not be recorded either.  */
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
      break;

    default:
      break;
    }

  /* Generic case: hash the code, the mode, and then every operand
     according to the rtx format string.  */
  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function  is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  hash += canon_hash (tem, 0);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  hash += canon_hash (XVECEXP (x, i, j), 0);
      else if (fmt[i] == 's')
	{
	  register unsigned char *p = (unsigned char *) XSTR (x, i);
	  if (p)
	    while (*p)
	      hash += *p++;
	}
      else if (fmt[i] == 'i')
	{
	  register unsigned tem = XINT (x, i);
	  hash += tem;
	}
      else if (fmt[i] == '0')
	/* unused */;
      else
	abort ();
    }
  return hash;
}
2270
2271/* Like canon_hash but with no side effects.  */
2272
2273static unsigned
2274safe_hash (x, mode)
2275     rtx x;
2276     enum machine_mode mode;
2277{
2278  int save_do_not_record = do_not_record;
2279  int save_hash_arg_in_memory = hash_arg_in_memory;
2280  int save_hash_arg_in_struct = hash_arg_in_struct;
2281  unsigned hash = canon_hash (x, mode);
2282  hash_arg_in_memory = save_hash_arg_in_memory;
2283  hash_arg_in_struct = save_hash_arg_in_struct;
2284  do_not_record = save_do_not_record;
2285  return hash;
2286}
2287
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
   that is known to be in the register.  Ordinarily, we don't allow them
   to match, because letting them match would cause unpredictable results
   in all the places that search a hash table chain for an equivalent
   for a given value.  A possible equivalent that has different structure
   has its hash code computed from different data.  Whether the hash code
   is the same as that of the given value is pure luck.  */

static int
exp_equiv_p (x, y, validate, equal_values)
     rtx x, y;
     int validate;
     int equal_values;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    {
      if (!equal_values)
	return 0;

      /* If X is a constant and Y is a register or vice versa, they may be
	 equivalent.  We only have to validate if Y is a register.  */
      if (CONSTANT_P (x) && GET_CODE (y) == REG
	  && REGNO_QTY_VALID_P (REGNO (y))
	  && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
	  && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
	  && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
	return 1;

      if (CONSTANT_P (y) && code == REG
	  && REGNO_QTY_VALID_P (REGNO (x))
	  && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
	  && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
	return 1;

      return 0;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      {
	int regno = REGNO (y);
	int endregno
	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
	int i;

	/* If the quantities are not the same, the expressions are not
	   equivalent.  If they are and we are not to validate, they
	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	  return 0;

	if (! validate)
	  return 1;

	for (i = regno; i < endregno; i++)
	  if (REG_IN_TABLE (i) != REG_TICK (i))
	    return 0;

	return 1;
      }

    /*  For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, equal_values))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, equal_values)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, equal_values)));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, equal_values))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	break;

	case '0':
	  break;

	default:
	  abort ();
	}
      }

  return 1;
}
2455
2456/* Return 1 iff any subexpression of X matches Y.
2457   Here we do not require that X or Y be valid (for registers referred to)
2458   for being in the hash table.  */
2459
2460static int
2461refers_to_p (x, y)
2462     rtx x, y;
2463{
2464  register int i;
2465  register enum rtx_code code;
2466  register char *fmt;
2467
2468 repeat:
2469  if (x == y)
2470    return 1;
2471  if (x == 0 || y == 0)
2472    return 0;
2473
2474  code = GET_CODE (x);
2475  /* If X as a whole has the same code as Y, they may match.
2476     If so, return 1.  */
2477  if (code == GET_CODE (y))
2478    {
2479      if (exp_equiv_p (x, y, 0, 1))
2480	return 1;
2481    }
2482
2483  /* X does not match, so try its subexpressions.  */
2484
2485  fmt = GET_RTX_FORMAT (code);
2486  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2487    if (fmt[i] == 'e')
2488      {
2489	if (i == 0)
2490	  {
2491	    x = XEXP (x, 0);
2492	    goto repeat;
2493	  }
2494	else
2495	  if (refers_to_p (XEXP (x, i), y))
2496	    return 1;
2497      }
2498    else if (fmt[i] == 'E')
2499      {
2500	int j;
2501	for (j = 0; j < XVECLEN (x, i); j++)
2502	  if (refers_to_p (XVECEXP (x, i, j), y))
2503	    return 1;
2504      }
2505
2506  return 0;
2507}
2508
/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
   set PBASE, PSTART, and PEND which correspond to the base of the address,
   the starting offset, and ending offset respectively.

   ADDR is known to be a nonvarying address.  */

/* ??? Despite what the comments say, this function is in fact frequently
   passed varying addresses.  This does not appear to cause any problems.  */

static void
set_nonvarying_address_components (addr, size, pbase, pstart, pend)
     rtx addr;
     int size;
     rtx *pbase;
     HOST_WIDE_INT *pstart, *pend;
{
  rtx base;
  HOST_WIDE_INT start, end;

  base = addr;
  start = 0;
  end = 0;

  /* Strip off an explicit PIC base register; the other operand carries
     the rest of the address.  */
  if (flag_pic && GET_CODE (base) == PLUS
      && XEXP (base, 0) == pic_offset_table_rtx)
    base = XEXP (base, 1);

  /* Registers with nonvarying addresses usually have constant equivalents;
     but the frame pointer register is also possible.  */
  if (GET_CODE (base) == REG
      && qty_const != 0
      && REGNO_QTY_VALID_P (REGNO (base))
      && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
      && qty_const[REG_QTY (REGNO (base))] != 0)
    base = qty_const[REG_QTY (REGNO (base))];
  else if (GET_CODE (base) == PLUS
	   && GET_CODE (XEXP (base, 1)) == CONST_INT
	   && GET_CODE (XEXP (base, 0)) == REG
	   && qty_const != 0
	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
	   && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
	       == GET_MODE (XEXP (base, 0)))
	   && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
    {
      start = INTVAL (XEXP (base, 1));
      base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
    }
  /* This can happen as the result of virtual register instantiation,
     if the initial offset is too large to be a valid address.  */
  else if (GET_CODE (base) == PLUS
	   && GET_CODE (XEXP (base, 0)) == REG
	   && GET_CODE (XEXP (base, 1)) == REG
	   && qty_const != 0
	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
	   && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
	       == GET_MODE (XEXP (base, 0)))
	   && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
	   && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
	       == GET_MODE (XEXP (base, 1)))
	   && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
    {
      rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
      base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];

      /* One of the two values must be a constant.  */
      if (GET_CODE (base) != CONST_INT)
	{
	  if (GET_CODE (tem) != CONST_INT)
	    abort ();
	  start = INTVAL (tem);
	}
      else
	{
	  start = INTVAL (base);
	  base = tem;
	}
    }

  /* Handle everything that we can find inside an address that has been
     viewed as constant.  */

  while (1)
    {
      /* If no part of this switch does a "continue", the code outside
	 will exit this loop.  */

      switch (GET_CODE (base))
	{
	case LO_SUM:
	  /* By definition, operand1 of a LO_SUM is the associated constant
	     address.  Use the associated constant address as the base
	     instead.  */
	  base = XEXP (base, 1);
	  continue;

	case CONST:
	  /* Strip off CONST.  */
	  base = XEXP (base, 0);
	  continue;

	case PLUS:
	  if (GET_CODE (XEXP (base, 1)) == CONST_INT)
	    {
	      start += INTVAL (XEXP (base, 1));
	      base = XEXP (base, 0);
	      continue;
	    }
	  break;

	case AND:
	  /* Handle the case of an AND which is the negative of a power of
	     two.  This is used to represent unaligned memory operations.  */
	  if (GET_CODE (XEXP (base, 1)) == CONST_INT
	      && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
	    {
	      set_nonvarying_address_components (XEXP (base, 0), size,
						 pbase, pstart, pend);

	      /* Assume the worst misalignment.  START is affected, but not
		 END, so compensate by adjusting SIZE.  Don't lose any
		 constant we already had.  */

	      size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
	      start += *pstart + INTVAL (XEXP (base, 1)) + 1;
	      end += *pend;
	      base = *pbase;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* A bare integer base folds entirely into the offset.  */
  if (GET_CODE (base) == CONST_INT)
    {
      start += INTVAL (base);
      base = const0_rtx;
    }

  end = start + size;

  /* Set the return values.  */
  *pbase = base;
  *pstart = start;
  *pend = end;
}
2659
2660/* Return 1 if X has a value that can vary even between two
2661   executions of the program.  0 means X can be compared reliably
2662   against certain constants or near-constants.  */
2663
2664static int
2665cse_rtx_varies_p (x)
2666     register rtx x;
2667{
2668  /* We need not check for X and the equivalence class being of the same
2669     mode because if X is equivalent to a constant in some mode, it
2670     doesn't vary in any mode.  */
2671
2672  if (GET_CODE (x) == REG
2673      && REGNO_QTY_VALID_P (REGNO (x))
2674      && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2675      && qty_const[REG_QTY (REGNO (x))] != 0)
2676    return 0;
2677
2678  if (GET_CODE (x) == PLUS
2679      && GET_CODE (XEXP (x, 1)) == CONST_INT
2680      && GET_CODE (XEXP (x, 0)) == REG
2681      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2682      && (GET_MODE (XEXP (x, 0))
2683	  == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2684      && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
2685    return 0;
2686
2687  /* This can happen as the result of virtual register instantiation, if
2688     the initial constant is too large to be a valid address.  This gives
2689     us a three instruction sequence, load large offset into a register,
2690     load fp minus a constant into a register, then a MEM which is the
2691     sum of the two `constant' registers.  */
2692  if (GET_CODE (x) == PLUS
2693      && GET_CODE (XEXP (x, 0)) == REG
2694      && GET_CODE (XEXP (x, 1)) == REG
2695      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2696      && (GET_MODE (XEXP (x, 0))
2697	  == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2698      && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
2699      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2700      && (GET_MODE (XEXP (x, 1))
2701	  == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
2702      && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
2703    return 0;
2704
2705  return rtx_varies_p (x);
2706}
2707
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is non-zero and we are replacing a pseudo with a hard register
   or vice versa, validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP non-zero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register enum rtx_code code;
  register char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and similar leaf rtx contain no register references
       to canonicalize.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	register int first;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	/* FIRST is the oldest register recorded for X's quantity.  */
	first = qty_first_reg[REG_QTY (REGNO (x))];
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
      }

    default:
      break;
    }

  /* Recurse over the operands, canonicalizing each in place.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;

      if (fmt[i] == 'e')
	{
	  rtx new = canon_reg (XEXP (x, i), insn);
	  int insn_code;

	  /* If replacing pseudo with hard reg or vice versa, ensure the
	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
	  if (insn != 0 && new != 0
	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
		  || (insn_code = recog_memoized (insn)) < 0
		  || insn_n_dups[insn_code] > 0))
	    validate_change (insn, &XEXP (x, i), new, 1);
	  else
	    XEXP (x, i) = new;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
    }

  return x;
}
2797
2798/* LOC is a location within INSN that is an operand address (the contents of
2799   a MEM).  Find the best equivalent address to use that is valid for this
2800   insn.
2801
2802   On most CISC machines, complicated address modes are costly, and rtx_cost
2803   is a good approximation for that cost.  However, most RISC machines have
2804   only a few (usually only one) memory reference formats.  If an address is
2805   valid at all, it is often just as cheap as any other address.  Hence, for
2806   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2807   costs of various addresses.  For two addresses of equal cost, choose the one
2808   with the highest `rtx_cost' value as that has the potential of eliminating
2809   the most insns.  For equal costs, we choose the first in the equivalence
2810   class.  Note that we ignore the fact that pseudo registers are cheaper
2811   than hard registers here because we would also prefer the pseudo registers.
2812  */
2813
2814static void
2815find_best_addr (insn, loc)
2816     rtx insn;
2817     rtx *loc;
2818{
2819  struct table_elt *elt;
2820  rtx addr = *loc;
2821#ifdef ADDRESS_COST
2822  struct table_elt *p;
2823  int found_better = 1;
2824#endif
2825  int save_do_not_record = do_not_record;
2826  int save_hash_arg_in_memory = hash_arg_in_memory;
2827  int save_hash_arg_in_struct = hash_arg_in_struct;
2828  int addr_volatile;
2829  int regno;
2830  unsigned hash;
2831
2832  /* Do not try to replace constant addresses or addresses of local and
2833     argument slots.  These MEM expressions are made only once and inserted
2834     in many instructions, as well as being used to control symbol table
2835     output.  It is not safe to clobber them.
2836
2837     There are some uncommon cases where the address is already in a register
2838     for some reason, but we cannot take advantage of that because we have
2839     no easy way to unshare the MEM.  In addition, looking up all stack
2840     addresses is costly.  */
2841  if ((GET_CODE (addr) == PLUS
2842       && GET_CODE (XEXP (addr, 0)) == REG
2843       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2844       && (regno = REGNO (XEXP (addr, 0)),
2845	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2846	   || regno == ARG_POINTER_REGNUM))
2847      || (GET_CODE (addr) == REG
2848	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2849	      || regno == HARD_FRAME_POINTER_REGNUM
2850	      || regno == ARG_POINTER_REGNUM))
2851      || GET_CODE (addr) == ADDRESSOF
2852      || CONSTANT_ADDRESS_P (addr))
2853    return;
2854
2855  /* If this address is not simply a register, try to fold it.  This will
2856     sometimes simplify the expression.  Many simplifications
2857     will not be valid, but some, usually applying the associative rule, will
2858     be valid and produce better code.  */
2859  if (GET_CODE (addr) != REG)
2860    {
2861      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2862
2863      if (1
2864#ifdef ADDRESS_COST
2865	  && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2866	      || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2867		  && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2868#else
2869	  && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2870#endif
2871	  && validate_change (insn, loc, folded, 0))
2872	addr = folded;
2873    }
2874
2875  /* If this address is not in the hash table, we can't look for equivalences
2876     of the whole address.  Also, ignore if volatile.  */
2877
2878  do_not_record = 0;
2879  hash = HASH (addr, Pmode);
2880  addr_volatile = do_not_record;
2881  do_not_record = save_do_not_record;
2882  hash_arg_in_memory = save_hash_arg_in_memory;
2883  hash_arg_in_struct = save_hash_arg_in_struct;
2884
2885  if (addr_volatile)
2886    return;
2887
2888  elt = lookup (addr, hash, Pmode);
2889
2890#ifndef ADDRESS_COST
2891  if (elt)
2892    {
2893      int our_cost = elt->cost;
2894
2895      /* Find the lowest cost below ours that works.  */
2896      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2897	if (elt->cost < our_cost
2898	    && (GET_CODE (elt->exp) == REG
2899		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
2900	    && validate_change (insn, loc,
2901				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2902	  return;
2903    }
2904#else
2905
2906  if (elt)
2907    {
2908      /* We need to find the best (under the criteria documented above) entry
2909	 in the class that is valid.  We use the `flag' field to indicate
2910	 choices that were invalid and iterate until we can't find a better
2911	 one that hasn't already been tried.  */
2912
2913      for (p = elt->first_same_value; p; p = p->next_same_value)
2914	p->flag = 0;
2915
2916      while (found_better)
2917	{
2918	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
2919	  int best_rtx_cost = (elt->cost + 1) >> 1;
2920	  struct table_elt *best_elt = elt;
2921
2922	  found_better = 0;
2923	  for (p = elt->first_same_value; p; p = p->next_same_value)
2924	    if (! p->flag)
2925	      {
2926		if ((GET_CODE (p->exp) == REG
2927		     || exp_equiv_p (p->exp, p->exp, 1, 0))
2928		    && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2929			|| (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2930			    && (p->cost + 1) >> 1 > best_rtx_cost)))
2931		  {
2932		    found_better = 1;
2933		    best_addr_cost = CSE_ADDRESS_COST (p->exp);
2934		    best_rtx_cost = (p->cost + 1) >> 1;
2935		    best_elt = p;
2936		  }
2937	      }
2938
2939	  if (found_better)
2940	    {
2941	      if (validate_change (insn, loc,
2942				   canon_reg (copy_rtx (best_elt->exp),
2943					      NULL_RTX), 0))
2944		return;
2945	      else
2946		best_elt->flag = 1;
2947	    }
2948	}
2949    }
2950
2951  /* If the address is a binary operation with the first operand a register
2952     and the second a constant, do the same as above, but looking for
2953     equivalences of the register.  Then try to simplify before checking for
2954     the best address to use.  This catches a few cases:  First is when we
2955     have REG+const and the register is another REG+const.  We can often merge
2956     the constants and eliminate one insn and one register.  It may also be
2957     that a machine has a cheap REG+REG+const.  Finally, this improves the
2958     code on the Alpha for unaligned byte stores.  */
2959
2960  if (flag_expensive_optimizations
2961      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2962	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2963      && GET_CODE (XEXP (*loc, 0)) == REG
2964      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2965    {
2966      rtx c = XEXP (*loc, 1);
2967
2968      do_not_record = 0;
2969      hash = HASH (XEXP (*loc, 0), Pmode);
2970      do_not_record = save_do_not_record;
2971      hash_arg_in_memory = save_hash_arg_in_memory;
2972      hash_arg_in_struct = save_hash_arg_in_struct;
2973
2974      elt = lookup (XEXP (*loc, 0), hash, Pmode);
2975      if (elt == 0)
2976	return;
2977
2978      /* We need to find the best (under the criteria documented above) entry
2979	 in the class that is valid.  We use the `flag' field to indicate
2980	 choices that were invalid and iterate until we can't find a better
2981	 one that hasn't already been tried.  */
2982
2983      for (p = elt->first_same_value; p; p = p->next_same_value)
2984	p->flag = 0;
2985
2986      while (found_better)
2987	{
2988	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
2989	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
2990	  struct table_elt *best_elt = elt;
2991	  rtx best_rtx = *loc;
2992	  int count;
2993
2994	  /* This is at worst case an O(n^2) algorithm, so limit our search
2995	     to the first 32 elements on the list.  This avoids trouble
2996	     compiling code with very long basic blocks that can easily
2997	     call cse_gen_binary so many times that we run out of memory.  */
2998
2999	  found_better = 0;
3000	  for (p = elt->first_same_value, count = 0;
3001	       p && count < 32;
3002	       p = p->next_same_value, count++)
3003	    if (! p->flag
3004		&& (GET_CODE (p->exp) == REG
3005		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3006	      {
3007		rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
3008
3009		if ((CSE_ADDRESS_COST (new) < best_addr_cost
3010		    || (CSE_ADDRESS_COST (new) == best_addr_cost
3011			&& (COST (new) + 1) >> 1 > best_rtx_cost)))
3012		  {
3013		    found_better = 1;
3014		    best_addr_cost = CSE_ADDRESS_COST (new);
3015		    best_rtx_cost = (COST (new) + 1) >> 1;
3016		    best_elt = p;
3017		    best_rtx = new;
3018		  }
3019	      }
3020
3021	  if (found_better)
3022	    {
3023	      if (validate_change (insn, loc,
3024				   canon_reg (copy_rtx (best_rtx),
3025					      NULL_RTX), 0))
3026		return;
3027	      else
3028		best_elt->flag = 1;
3029	    }
3030	}
3031    }
3032#endif
3033}
3034
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   find what values are actually being compared.
3038
3039   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3040   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3041   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3042   compared to produce cc0.
3043
   The return value is the comparison operator: either CODE itself or the
   code corresponding to the inverse of the comparison.  */
3046
static enum rtx_code
find_comparison_args (code, parg1, parg2, pmode1, pmode2)
     enum rtx_code code;
     rtx *parg1, *parg2;
     enum machine_mode *pmode1, *pmode2;
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  Each trip
     through this loop either replaces (ARG1, ARG2, CODE) with the operands
     and code of an underlying comparison, or breaks out when nothing more
     can be learned.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set non-zero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
	{
	  /* (ne (cmp A B) 0) is equivalent to (cmp A B); with a negative
	     STORE_FLAG_VALUE the same holds for LT (sign-bit test).  */
	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		  && FLOAT_STORE_FLAG_VALUE < 0)
#endif
	      )
	    x = arg1;
	  /* (eq (cmp A B) 0) is the INVERSE of (cmp A B); likewise GE
	     when STORE_FLAG_VALUE is -1.  */
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
		    GET_MODE (arg1));
      /* Walk the entire equivalence class from its canonical head.  */
      if (p) p = p->first_same_value;

      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_INT
		       && (GET_MODE_BITSIZE (inner_mode)
			   <= HOST_BITS_PER_WIDE_INT)
		       && (STORE_FLAG_VALUE
			   & ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
		       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
		   )
		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_INT
			&& (GET_MODE_BITSIZE (inner_mode)
			    <= HOST_BITS_PER_WIDE_INT)
			&& (STORE_FLAG_VALUE
			    & ((HOST_WIDE_INT) 1
			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
			&& FLOAT_STORE_FLAG_VALUE < 0)
#endif
		    )
		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is fp + constant, the equivalent is a better operand since
	     it may let us predict the value of the comparison.  */
	  else if (NONZERO_BASE_PLUS_P (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      arg1 = XEXP (x, 0),  arg2 = XEXP (x, 1);
      if (GET_RTX_CLASS (GET_CODE (x)) == '<')
	code = GET_CODE (x);

      if (reverse_code)
	code = reverse_condition (code);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
3197
3198/* Try to simplify a unary operation CODE whose output mode is to be
3199   MODE with input operand OP whose mode was originally OP_MODE.
3200   Return zero if no simplification can be made.  */
3201
rtx
simplify_unary_operation (code, mode, op, op_mode)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op;
     enum machine_mode op_mode;
{
  register int width = GET_MODE_BITSIZE (mode);

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)

  /* Case 1: FLOAT of an integer constant (CONST_INT or a VOIDmode
     CONST_DOUBLE holding a two-word integer) -- fold to a floating
     CONST_DOUBLE.  */
  if (code == FLOAT && GET_MODE (op) == VOIDmode
      && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (GET_CODE (op) == CONST_INT)
	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
      else
	lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, lv, hv, mode);
#else
      /* Host `double' fallback: scale the high word by
	 2**HOST_BITS_PER_WIDE_INT, done as the product of two half-width
	 shifts so the shift count never reaches the type width.  */
      if (hv < 0)
	{
	  d = (double) (~ hv);
	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
	  d += (double) (unsigned HOST_WIDE_INT) (~ lv);
	  d = (- d - 1.0);
	}
      else
	{
	  d = (double) hv;
	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
	  d += (double) (unsigned HOST_WIDE_INT) lv;
	}
#endif  /* REAL_ARITHMETIC */
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  /* Case 2: UNSIGNED_FLOAT of an integer constant.  */
  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (GET_CODE (op) == CONST_INT)
	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
      else
	lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);

      if (op_mode == VOIDmode)
	{
	  /* We don't know how to interpret negative-looking numbers in
	     this case, so don't try to fold those.  */
	  if (hv < 0)
	    return 0;
	}
      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
	;
      else
	/* Narrow operand: keep only the bits of OP_MODE so the value is
	   interpreted as unsigned.  */
	hv = 0, lv &= GET_MODE_MASK (op_mode);

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
#else

      d = (double) (unsigned HOST_WIDE_INT) hv;
      d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
	    * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
      d += (double) (unsigned HOST_WIDE_INT) lv;
#endif  /* REAL_ARITHMETIC */
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
#endif

  /* Case 3: any unary operation on a CONST_INT whose result fits in one
     HOST_WIDE_INT -- compute the value directly.  */
  if (GET_CODE (op) == CONST_INT
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      register HOST_WIDE_INT arg0 = INTVAL (op);
      register HOST_WIDE_INT val;

      switch (code)
	{
	case NOT:
	  val = ~ arg0;
	  break;

	case NEG:
	  val = - arg0;
	  break;

	case ABS:
	  val = (arg0 >= 0 ? arg0 : - arg0);
	  break;

	case FFS:
	  /* Don't use ffs here.  Instead, get low order bit and then its
	     number.  If arg0 is zero, this will return 0, as desired.  */
	  arg0 &= GET_MODE_MASK (mode);
	  val = exact_log2 (arg0 & (- arg0)) + 1;
	  break;

	case TRUNCATE:
	  val = arg0;
	  break;

	case ZERO_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      if (width != GET_MODE_BITSIZE (op_mode))
		abort ();
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	  else
	    return 0;
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      if (width != GET_MODE_BITSIZE (op_mode))
		abort ();
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    {
	      /* Mask to OP_MODE, then subtract 2**bitsize if the sign bit
		 of OP_MODE is set, giving the sign-extended value.  */
	      val
		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	      if (val
		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
	    }
	  else
	    return 0;
	  break;

	case SQRT:
	  return 0;

	default:
	  abort ();
	}

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	val &= ((HOST_WIDE_INT) 1 << width) - 1;

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will look
	 the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	val |= ((HOST_WIDE_INT) (-1) << width);

      return GEN_INT (val);
    }

  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
     for a DImode operation on a CONST_INT.  */
  /* NOTE(review): the width bound here is HOST_BITS_PER_INT * 2, while the
     analogous multi-word test in simplify_binary_operation uses
     HOST_BITS_PER_WIDE_INT * 2 -- confirm the asymmetry is intentional.  */
  else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT l1, h1, lv, hv;

      if (GET_CODE (op) == CONST_DOUBLE)
	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
      else
	l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;

      switch (code)
	{
	case NOT:
	  lv = ~ l1;
	  hv = ~ h1;
	  break;

	case NEG:
	  neg_double (l1, h1, &lv, &hv);
	  break;

	case ABS:
	  if (h1 < 0)
	    neg_double (l1, h1, &lv, &hv);
	  else
	    lv = l1, hv = h1;
	  break;

	case FFS:
	  /* Look in the low word first; fall back to the high word,
	     offsetting the bit number by a full word.  */
	  hv = 0;
	  if (l1 == 0)
	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
	  else
	    lv = exact_log2 (l1 & (-l1)) + 1;
	  break;

	case TRUNCATE:
	  /* This is just a change-of-mode, so do nothing.  */
	  lv = l1, hv = h1;
	  break;

	case ZERO_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;

	  hv = 0;
	  lv = l1 & GET_MODE_MASK (op_mode);
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;
	  else
	    {
	      lv = l1 & GET_MODE_MASK (op_mode);
	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
		  && (lv & ((HOST_WIDE_INT) 1
			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);

	      hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
	    }
	  break;

	case SQRT:
	  return 0;

	default:
	  return 0;
	}

      return immed_double_const (lv, hv, mode);
    }

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  /* Case 5: unary floating-point operation on a floating CONST_DOUBLE.
     A trapping operation longjmps back to the setjmp below via the float
     handler, and we decline to fold.  */
  else if (GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE_CLASS (mode) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      rtx x;

      if (setjmp (handler))
	/* There used to be a warning here, but that is inadvisable.
	   People may want to cause traps, and the natural way
	   to do it should not get a warning.  */
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case NEG:
	  d = REAL_VALUE_NEGATE (d);
	  break;

	case ABS:
	  if (REAL_VALUE_NEGATIVE (d))
	    d = REAL_VALUE_NEGATE (d);
	  break;

	case FLOAT_TRUNCATE:
	  d = real_value_truncate (mode, d);
	  break;

	case FLOAT_EXTEND:
	  /* All this does is change the mode.  */
	  break;

	case FIX:
	  d = REAL_VALUE_RNDZINT (d);
	  break;

	case UNSIGNED_FIX:
	  d = REAL_VALUE_UNSIGNED_RNDZINT (d);
	  break;

	case SQRT:
	  return 0;

	default:
	  abort ();
	}

      x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
      set_float_handler (NULL_PTR);
      return x;
    }

  /* Case 6: FIX/UNSIGNED_FIX of a floating CONST_DOUBLE to an integer
     mode narrow enough for a CONST_INT.  */
  else if (GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	   && GET_MODE_CLASS (mode) == MODE_INT
	   && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      HOST_WIDE_INT val;

      if (setjmp (handler))
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case FIX:
	  val = REAL_VALUE_FIX (d);
	  break;

	case UNSIGNED_FIX:
	  val = REAL_VALUE_UNSIGNED_FIX (d);
	  break;

	default:
	  abort ();
	}

      set_float_handler (NULL_PTR);

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	val &= ((HOST_WIDE_INT) 1 << width) - 1;

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will look
	 the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	val |= ((HOST_WIDE_INT) (-1) << width);

      return GEN_INT (val);
    }
#endif
  /* This was formerly used only for non-IEEE float.
     eggert@twinsun.com says it is safe for IEEE also.  */
  else
    {
      /* There are some simplifications we can do even if the operands
	 aren't constant.  */
      switch (code)
	{
	case NEG:
	case NOT:
	  /* (not (not X)) == X, similarly for NEG.  */
	  if (GET_CODE (op) == code)
	    return XEXP (op, 0);
	  break;

	case SIGN_EXTEND:
	  /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
	     becomes just the MINUS if its mode is MODE.  This allows
	     folding switch statements on machines using casesi (such as
	     the Vax).  */
	  if (GET_CODE (op) == TRUNCATE
	      && GET_MODE (XEXP (op, 0)) == mode
	      && GET_CODE (XEXP (op, 0)) == MINUS
	      && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
	      && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
	    return XEXP (op, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	  /* Sign-extending a constant pointer is a no-op convertible to a
	     mode change when pointers extend signed.  */
	  if (! POINTERS_EXTEND_UNSIGNED
	      && mode == Pmode && GET_MODE (op) == ptr_mode
	      && CONSTANT_P (op))
	    return convert_memory_address (Pmode, op);
#endif
	  break;

#ifdef POINTERS_EXTEND_UNSIGNED
	case ZERO_EXTEND:
	  if (POINTERS_EXTEND_UNSIGNED
	      && mode == Pmode && GET_MODE (op) == ptr_mode
	      && CONSTANT_P (op))
	    return convert_memory_address (Pmode, op);
	  break;
#endif

	default:
	  break;
	}

      return 0;
    }
}
3635
3636/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3637   and OP1.  Return 0 if no simplification is possible.
3638
3639   Don't use this for relational operations such as EQ or LT.
3640   Use simplify_relational_operation instead.  */
3641
3642rtx
3643simplify_binary_operation (code, mode, op0, op1)
3644     enum rtx_code code;
3645     enum machine_mode mode;
3646     rtx op0, op1;
3647{
3648  register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3649  HOST_WIDE_INT val;
3650  int width = GET_MODE_BITSIZE (mode);
3651  rtx tem;
3652
3653  /* Relational operations don't work here.  We must know the mode
3654     of the operands in order to do the comparison correctly.
3655     Assuming a full word can give incorrect results.
3656     Consider comparing 128 with -128 in QImode.  */
3657
3658  if (GET_RTX_CLASS (code) == '<')
3659    abort ();
3660
3661#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3662  if (GET_MODE_CLASS (mode) == MODE_FLOAT
3663      && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3664      && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3665    {
3666      REAL_VALUE_TYPE f0, f1, value;
3667      jmp_buf handler;
3668
3669      if (setjmp (handler))
3670	return 0;
3671
3672      set_float_handler (handler);
3673
3674      REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3675      REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3676      f0 = real_value_truncate (mode, f0);
3677      f1 = real_value_truncate (mode, f1);
3678
3679#ifdef REAL_ARITHMETIC
3680#ifndef REAL_INFINITY
3681      if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3682	return 0;
3683#endif
3684      REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3685#else
3686      switch (code)
3687	{
3688	case PLUS:
3689	  value = f0 + f1;
3690	  break;
3691	case MINUS:
3692	  value = f0 - f1;
3693	  break;
3694	case MULT:
3695	  value = f0 * f1;
3696	  break;
3697	case DIV:
3698#ifndef REAL_INFINITY
3699	  if (f1 == 0)
3700	    return 0;
3701#endif
3702	  value = f0 / f1;
3703	  break;
3704	case SMIN:
3705	  value = MIN (f0, f1);
3706	  break;
3707	case SMAX:
3708	  value = MAX (f0, f1);
3709	  break;
3710	default:
3711	  abort ();
3712	}
3713#endif
3714
3715      value = real_value_truncate (mode, value);
3716      set_float_handler (NULL_PTR);
3717      return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3718    }
3719#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3720
3721  /* We can fold some multi-word operations.  */
3722  if (GET_MODE_CLASS (mode) == MODE_INT
3723      && width == HOST_BITS_PER_WIDE_INT * 2
3724      && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3725      && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3726    {
3727      HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3728
3729      if (GET_CODE (op0) == CONST_DOUBLE)
3730	l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3731      else
3732	l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3733
3734      if (GET_CODE (op1) == CONST_DOUBLE)
3735	l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3736      else
3737	l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3738
3739      switch (code)
3740	{
3741	case MINUS:
3742	  /* A - B == A + (-B).  */
3743	  neg_double (l2, h2, &lv, &hv);
3744	  l2 = lv, h2 = hv;
3745
3746	  /* .. fall through ...  */
3747
3748	case PLUS:
3749	  add_double (l1, h1, l2, h2, &lv, &hv);
3750	  break;
3751
3752	case MULT:
3753	  mul_double (l1, h1, l2, h2, &lv, &hv);
3754	  break;
3755
3756	case DIV:  case MOD:   case UDIV:  case UMOD:
3757	  /* We'd need to include tree.h to do this and it doesn't seem worth
3758	     it.  */
3759	  return 0;
3760
3761	case AND:
3762	  lv = l1 & l2, hv = h1 & h2;
3763	  break;
3764
3765	case IOR:
3766	  lv = l1 | l2, hv = h1 | h2;
3767	  break;
3768
3769	case XOR:
3770	  lv = l1 ^ l2, hv = h1 ^ h2;
3771	  break;
3772
3773	case SMIN:
3774	  if (h1 < h2
3775	      || (h1 == h2
3776		  && ((unsigned HOST_WIDE_INT) l1
3777		      < (unsigned HOST_WIDE_INT) l2)))
3778	    lv = l1, hv = h1;
3779	  else
3780	    lv = l2, hv = h2;
3781	  break;
3782
3783	case SMAX:
3784	  if (h1 > h2
3785	      || (h1 == h2
3786		  && ((unsigned HOST_WIDE_INT) l1
3787		      > (unsigned HOST_WIDE_INT) l2)))
3788	    lv = l1, hv = h1;
3789	  else
3790	    lv = l2, hv = h2;
3791	  break;
3792
3793	case UMIN:
3794	  if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3795	      || (h1 == h2
3796		  && ((unsigned HOST_WIDE_INT) l1
3797		      < (unsigned HOST_WIDE_INT) l2)))
3798	    lv = l1, hv = h1;
3799	  else
3800	    lv = l2, hv = h2;
3801	  break;
3802
3803	case UMAX:
3804	  if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3805	      || (h1 == h2
3806		  && ((unsigned HOST_WIDE_INT) l1
3807		      > (unsigned HOST_WIDE_INT) l2)))
3808	    lv = l1, hv = h1;
3809	  else
3810	    lv = l2, hv = h2;
3811	  break;
3812
3813	case LSHIFTRT:   case ASHIFTRT:
3814	case ASHIFT:
3815	case ROTATE:     case ROTATERT:
3816#ifdef SHIFT_COUNT_TRUNCATED
3817	  if (SHIFT_COUNT_TRUNCATED)
3818	    l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3819#endif
3820
3821	  if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3822	    return 0;
3823
3824	  if (code == LSHIFTRT || code == ASHIFTRT)
3825	    rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3826			   code == ASHIFTRT);
3827	  else if (code == ASHIFT)
3828	    lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3829	  else if (code == ROTATE)
3830	    lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3831	  else /* code == ROTATERT */
3832	    rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3833	  break;
3834
3835	default:
3836	  return 0;
3837	}
3838
3839      return immed_double_const (lv, hv, mode);
3840    }
3841
3842  if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3843      || width > HOST_BITS_PER_WIDE_INT || width == 0)
3844    {
3845      /* Even if we can't compute a constant result,
3846	 there are some cases worth simplifying.  */
3847
3848      switch (code)
3849	{
3850	case PLUS:
3851	  /* In IEEE floating point, x+0 is not the same as x.  Similarly
3852	     for the other optimizations below.  */
3853	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3854	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3855	    break;
3856
3857	  if (op1 == CONST0_RTX (mode))
3858	    return op0;
3859
3860	  /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3861	  if (GET_CODE (op0) == NEG)
3862	    return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3863	  else if (GET_CODE (op1) == NEG)
3864	    return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3865
3866	  /* Handle both-operands-constant cases.  We can only add
3867	     CONST_INTs to constants since the sum of relocatable symbols
3868	     can't be handled by most assemblers.  Don't add CONST_INT
3869	     to CONST_INT since overflow won't be computed properly if wider
3870	     than HOST_BITS_PER_WIDE_INT.  */
3871
3872	  if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3873	      && GET_CODE (op1) == CONST_INT)
3874	    return plus_constant (op0, INTVAL (op1));
3875	  else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3876		   && GET_CODE (op0) == CONST_INT)
3877	    return plus_constant (op1, INTVAL (op0));
3878
3879	  /* See if this is something like X * C - X or vice versa or
3880	     if the multiplication is written as a shift.  If so, we can
3881	     distribute and make a new multiply, shift, or maybe just
3882	     have X (if C is 2 in the example above).  But don't make
3883	     real multiply if we didn't have one before.  */
3884
3885	  if (! FLOAT_MODE_P (mode))
3886	    {
3887	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3888	      rtx lhs = op0, rhs = op1;
3889	      int had_mult = 0;
3890
3891	      if (GET_CODE (lhs) == NEG)
3892		coeff0 = -1, lhs = XEXP (lhs, 0);
3893	      else if (GET_CODE (lhs) == MULT
3894		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3895		{
3896		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3897		  had_mult = 1;
3898		}
3899	      else if (GET_CODE (lhs) == ASHIFT
3900		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3901		       && INTVAL (XEXP (lhs, 1)) >= 0
3902		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3903		{
3904		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3905		  lhs = XEXP (lhs, 0);
3906		}
3907
3908	      if (GET_CODE (rhs) == NEG)
3909		coeff1 = -1, rhs = XEXP (rhs, 0);
3910	      else if (GET_CODE (rhs) == MULT
3911		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3912		{
3913		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3914		  had_mult = 1;
3915		}
3916	      else if (GET_CODE (rhs) == ASHIFT
3917		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3918		       && INTVAL (XEXP (rhs, 1)) >= 0
3919		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3920		{
3921		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3922		  rhs = XEXP (rhs, 0);
3923		}
3924
3925	      if (rtx_equal_p (lhs, rhs))
3926		{
3927		  tem = cse_gen_binary (MULT, mode, lhs,
3928					GEN_INT (coeff0 + coeff1));
3929		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3930		}
3931	    }
3932
3933	  /* If one of the operands is a PLUS or a MINUS, see if we can
3934	     simplify this by the associative law.
3935	     Don't use the associative law for floating point.
3936	     The inaccuracy makes it nonassociative,
3937	     and subtle programs can break if operations are associated.  */
3938
3939	  if (INTEGRAL_MODE_P (mode)
3940	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3941		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3942	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3943	    return tem;
3944	  break;
3945
3946	case COMPARE:
3947#ifdef HAVE_cc0
3948	  /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3949	     using cc0, in which case we want to leave it as a COMPARE
3950	     so we can distinguish it from a register-register-copy.
3951
3952	     In IEEE floating point, x-0 is not the same as x.  */
3953
3954	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3955	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
3956	      && op1 == CONST0_RTX (mode))
3957	    return op0;
3958#else
3959	  /* Do nothing here.  */
3960#endif
3961	  break;
3962
3963	case MINUS:
3964	  /* None of these optimizations can be done for IEEE
3965	     floating point.  */
3966	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3967	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3968	    break;
3969
3970	  /* We can't assume x-x is 0 even with non-IEEE floating point,
3971	     but since it is zero except in very strange circumstances, we
3972	     will treat it as zero with -ffast-math.  */
3973	  if (rtx_equal_p (op0, op1)
3974	      && ! side_effects_p (op0)
3975	      && (! FLOAT_MODE_P (mode) || flag_fast_math))
3976	    return CONST0_RTX (mode);
3977
3978	  /* Change subtraction from zero into negation.  */
3979	  if (op0 == CONST0_RTX (mode))
3980	    return gen_rtx_NEG (mode, op1);
3981
3982	  /* (-1 - a) is ~a.  */
3983	  if (op0 == constm1_rtx)
3984	    return gen_rtx_NOT (mode, op1);
3985
3986	  /* Subtracting 0 has no effect.  */
3987	  if (op1 == CONST0_RTX (mode))
3988	    return op0;
3989
3990	  /* See if this is something like X * C - X or vice versa or
3991	     if the multiplication is written as a shift.  If so, we can
3992	     distribute and make a new multiply, shift, or maybe just
3993	     have X (if C is 2 in the example above).  But don't make
3994	     real multiply if we didn't have one before.  */
3995
3996	  if (! FLOAT_MODE_P (mode))
3997	    {
3998	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3999	      rtx lhs = op0, rhs = op1;
4000	      int had_mult = 0;
4001
4002	      if (GET_CODE (lhs) == NEG)
4003		coeff0 = -1, lhs = XEXP (lhs, 0);
4004	      else if (GET_CODE (lhs) == MULT
4005		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
4006		{
4007		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
4008		  had_mult = 1;
4009		}
4010	      else if (GET_CODE (lhs) == ASHIFT
4011		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
4012		       && INTVAL (XEXP (lhs, 1)) >= 0
4013		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
4014		{
4015		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
4016		  lhs = XEXP (lhs, 0);
4017		}
4018
4019	      if (GET_CODE (rhs) == NEG)
4020		coeff1 = - 1, rhs = XEXP (rhs, 0);
4021	      else if (GET_CODE (rhs) == MULT
4022		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
4023		{
4024		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
4025		  had_mult = 1;
4026		}
4027	      else if (GET_CODE (rhs) == ASHIFT
4028		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
4029		       && INTVAL (XEXP (rhs, 1)) >= 0
4030		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
4031		{
4032		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
4033		  rhs = XEXP (rhs, 0);
4034		}
4035
4036	      if (rtx_equal_p (lhs, rhs))
4037		{
4038		  tem = cse_gen_binary (MULT, mode, lhs,
4039					GEN_INT (coeff0 - coeff1));
4040		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4041		}
4042	    }
4043
4044	  /* (a - (-b)) -> (a + b).  */
4045	  if (GET_CODE (op1) == NEG)
4046	    return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
4047
4048	  /* If one of the operands is a PLUS or a MINUS, see if we can
4049	     simplify this by the associative law.
4050	     Don't use the associative law for floating point.
4051	     The inaccuracy makes it nonassociative,
4052	     and subtle programs can break if operations are associated.  */
4053
4054	  if (INTEGRAL_MODE_P (mode)
4055	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4056		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4057	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4058	    return tem;
4059
4060	  /* Don't let a relocatable value get a negative coeff.  */
4061	  if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
4062	    return plus_constant (op0, - INTVAL (op1));
4063
4064	  /* (x - (x & y)) -> (x & ~y) */
4065	  if (GET_CODE (op1) == AND)
4066	    {
4067	     if (rtx_equal_p (op0, XEXP (op1, 0)))
4068	       return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
4069	     if (rtx_equal_p (op0, XEXP (op1, 1)))
4070	       return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
4071	   }
4072	  break;
4073
4074	case MULT:
4075	  if (op1 == constm1_rtx)
4076	    {
4077	      tem = simplify_unary_operation (NEG, mode, op0, mode);
4078
4079	      return tem ? tem : gen_rtx_NEG (mode, op0);
4080	    }
4081
4082	  /* In IEEE floating point, x*0 is not always 0.  */
4083	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4084	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
4085	      && op1 == CONST0_RTX (mode)
4086	      && ! side_effects_p (op0))
4087	    return op1;
4088
4089	  /* In IEEE floating point, x*1 is not equivalent to x for nans.
4090	     However, ANSI says we can drop signals,
4091	     so we can do this anyway.  */
4092	  if (op1 == CONST1_RTX (mode))
4093	    return op0;
4094
4095	  /* Convert multiply by constant power of two into shift unless
4096	     we are still generating RTL.  This test is a kludge.  */
4097	  if (GET_CODE (op1) == CONST_INT
4098	      && (val = exact_log2 (INTVAL (op1))) >= 0
4099	      /* If the mode is larger than the host word size, and the
4100		 uppermost bit is set, then this isn't a power of two due
4101		 to implicit sign extension.  */
4102	      && (width <= HOST_BITS_PER_WIDE_INT
4103		  || val != HOST_BITS_PER_WIDE_INT - 1)
4104	      && ! rtx_equal_function_value_matters)
4105	    return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
4106
4107	  if (GET_CODE (op1) == CONST_DOUBLE
4108	      && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4109	    {
4110	      REAL_VALUE_TYPE d;
4111	      jmp_buf handler;
4112	      int op1is2, op1ism1;
4113
4114	      if (setjmp (handler))
4115		return 0;
4116
4117	      set_float_handler (handler);
4118	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4119	      op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4120	      op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4121	      set_float_handler (NULL_PTR);
4122
4123	      /* x*2 is x+x and x*(-1) is -x */
4124	      if (op1is2 && GET_MODE (op0) == mode)
4125		return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
4126
4127	      else if (op1ism1 && GET_MODE (op0) == mode)
4128		return gen_rtx_NEG (mode, op0);
4129	    }
4130	  break;
4131
4132	case IOR:
4133	  if (op1 == const0_rtx)
4134	    return op0;
4135	  if (GET_CODE (op1) == CONST_INT
4136	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4137	    return op1;
4138	  if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4139	    return op0;
4140	  /* A | (~A) -> -1 */
4141	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4142	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4143	      && ! side_effects_p (op0)
4144	      && GET_MODE_CLASS (mode) != MODE_CC)
4145	    return constm1_rtx;
4146	  break;
4147
4148	case XOR:
4149	  if (op1 == const0_rtx)
4150	    return op0;
4151	  if (GET_CODE (op1) == CONST_INT
4152	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4153	    return gen_rtx_NOT (mode, op0);
4154	  if (op0 == op1 && ! side_effects_p (op0)
4155	      && GET_MODE_CLASS (mode) != MODE_CC)
4156	    return const0_rtx;
4157	  break;
4158
4159	case AND:
4160	  if (op1 == const0_rtx && ! side_effects_p (op0))
4161	    return const0_rtx;
4162	  if (GET_CODE (op1) == CONST_INT
4163	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4164	    return op0;
4165	  if (op0 == op1 && ! side_effects_p (op0)
4166	      && GET_MODE_CLASS (mode) != MODE_CC)
4167	    return op0;
4168	  /* A & (~A) -> 0 */
4169	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4170	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4171	      && ! side_effects_p (op0)
4172	      && GET_MODE_CLASS (mode) != MODE_CC)
4173	    return const0_rtx;
4174	  break;
4175
4176	case UDIV:
4177	  /* Convert divide by power of two into shift (divide by 1 handled
4178	     below).  */
4179	  if (GET_CODE (op1) == CONST_INT
4180	      && (arg1 = exact_log2 (INTVAL (op1))) > 0)
4181	    return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
4182
4183	  /* ... fall through ...  */
4184
4185	case DIV:
4186	  if (op1 == CONST1_RTX (mode))
4187	    return op0;
4188
4189	  /* In IEEE floating point, 0/x is not always 0.  */
4190	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4191	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
4192	      && op0 == CONST0_RTX (mode)
4193	      && ! side_effects_p (op1))
4194	    return op0;
4195
4196#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4197	  /* Change division by a constant into multiplication.  Only do
4198	     this with -ffast-math until an expert says it is safe in
4199	     general.  */
4200	  else if (GET_CODE (op1) == CONST_DOUBLE
4201		   && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
4202		   && op1 != CONST0_RTX (mode)
4203		   && flag_fast_math)
4204	    {
4205	      REAL_VALUE_TYPE d;
4206	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4207
4208	      if (! REAL_VALUES_EQUAL (d, dconst0))
4209		{
4210#if defined (REAL_ARITHMETIC)
4211		  REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
4212		  return gen_rtx_MULT (mode, op0,
4213				       CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
4214#else
4215		  return gen_rtx_MULT (mode, op0,
4216				       CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
4217#endif
4218		}
4219	    }
4220#endif
4221	  break;
4222
4223	case UMOD:
4224	  /* Handle modulus by power of two (mod with 1 handled below).  */
4225	  if (GET_CODE (op1) == CONST_INT
4226	      && exact_log2 (INTVAL (op1)) > 0)
4227	    return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
4228
4229	  /* ... fall through ...  */
4230
4231	case MOD:
4232	  if ((op0 == const0_rtx || op1 == const1_rtx)
4233	      && ! side_effects_p (op0) && ! side_effects_p (op1))
4234	    return const0_rtx;
4235	  break;
4236
4237	case ROTATERT:
4238	case ROTATE:
4239	  /* Rotating ~0 always results in ~0.  */
4240	  if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4241	      && INTVAL (op0) == GET_MODE_MASK (mode)
4242	      && ! side_effects_p (op1))
4243	    return op0;
4244
4245	  /* ... fall through ...  */
4246
4247	case ASHIFT:
4248	case ASHIFTRT:
4249	case LSHIFTRT:
4250	  if (op1 == const0_rtx)
4251	    return op0;
4252	  if (op0 == const0_rtx && ! side_effects_p (op1))
4253	    return op0;
4254	  break;
4255
4256	case SMIN:
4257	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4258	      && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
4259	      && ! side_effects_p (op0))
4260	    return op1;
4261	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4262	    return op0;
4263	  break;
4264
4265	case SMAX:
4266	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4267	      && (INTVAL (op1)
4268		  == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4269	      && ! side_effects_p (op0))
4270	    return op1;
4271	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4272	    return op0;
4273	  break;
4274
4275	case UMIN:
4276	  if (op1 == const0_rtx && ! side_effects_p (op0))
4277	    return op1;
4278	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4279	    return op0;
4280	  break;
4281
4282	case UMAX:
4283	  if (op1 == constm1_rtx && ! side_effects_p (op0))
4284	    return op1;
4285	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4286	    return op0;
4287	  break;
4288
4289	default:
4290	  abort ();
4291	}
4292
4293      return 0;
4294    }
4295
4296  /* Get the integer argument values in two forms:
4297     zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */
4298
4299  arg0 = INTVAL (op0);
4300  arg1 = INTVAL (op1);
4301
4302  if (width < HOST_BITS_PER_WIDE_INT)
4303    {
4304      arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4305      arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4306
4307      arg0s = arg0;
4308      if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4309	arg0s |= ((HOST_WIDE_INT) (-1) << width);
4310
4311      arg1s = arg1;
4312      if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4313	arg1s |= ((HOST_WIDE_INT) (-1) << width);
4314    }
4315  else
4316    {
4317      arg0s = arg0;
4318      arg1s = arg1;
4319    }
4320
4321  /* Compute the value of the arithmetic.  */
4322
4323  switch (code)
4324    {
4325    case PLUS:
4326      val = arg0s + arg1s;
4327      break;
4328
4329    case MINUS:
4330      val = arg0s - arg1s;
4331      break;
4332
4333    case MULT:
4334      val = arg0s * arg1s;
4335      break;
4336
4337    case DIV:
4338      if (arg1s == 0)
4339	return 0;
4340      val = arg0s / arg1s;
4341      break;
4342
4343    case MOD:
4344      if (arg1s == 0)
4345	return 0;
4346      val = arg0s % arg1s;
4347      break;
4348
4349    case UDIV:
4350      if (arg1 == 0)
4351	return 0;
4352      val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4353      break;
4354
4355    case UMOD:
4356      if (arg1 == 0)
4357	return 0;
4358      val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4359      break;
4360
4361    case AND:
4362      val = arg0 & arg1;
4363      break;
4364
4365    case IOR:
4366      val = arg0 | arg1;
4367      break;
4368
4369    case XOR:
4370      val = arg0 ^ arg1;
4371      break;
4372
4373    case LSHIFTRT:
4374      /* If shift count is undefined, don't fold it; let the machine do
4375	 what it wants.  But truncate it if the machine will do that.  */
4376      if (arg1 < 0)
4377	return 0;
4378
4379#ifdef SHIFT_COUNT_TRUNCATED
4380      if (SHIFT_COUNT_TRUNCATED)
4381	arg1 %= width;
4382#endif
4383
4384      val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4385      break;
4386
4387    case ASHIFT:
4388      if (arg1 < 0)
4389	return 0;
4390
4391#ifdef SHIFT_COUNT_TRUNCATED
4392      if (SHIFT_COUNT_TRUNCATED)
4393	arg1 %= width;
4394#endif
4395
4396      val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4397      break;
4398
4399    case ASHIFTRT:
4400      if (arg1 < 0)
4401	return 0;
4402
4403#ifdef SHIFT_COUNT_TRUNCATED
4404      if (SHIFT_COUNT_TRUNCATED)
4405	arg1 %= width;
4406#endif
4407
4408      val = arg0s >> arg1;
4409
4410      /* Bootstrap compiler may not have sign extended the right shift.
4411	 Manually extend the sign to insure bootstrap cc matches gcc.  */
4412      if (arg0s < 0 && arg1 > 0)
4413	val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4414
4415      break;
4416
4417    case ROTATERT:
4418      if (arg1 < 0)
4419	return 0;
4420
4421      arg1 %= width;
4422      val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4423	     | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4424      break;
4425
4426    case ROTATE:
4427      if (arg1 < 0)
4428	return 0;
4429
4430      arg1 %= width;
4431      val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4432	     | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4433      break;
4434
4435    case COMPARE:
4436      /* Do nothing here.  */
4437      return 0;
4438
4439    case SMIN:
4440      val = arg0s <= arg1s ? arg0s : arg1s;
4441      break;
4442
4443    case UMIN:
4444      val = ((unsigned HOST_WIDE_INT) arg0
4445	     <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4446      break;
4447
4448    case SMAX:
4449      val = arg0s > arg1s ? arg0s : arg1s;
4450      break;
4451
4452    case UMAX:
4453      val = ((unsigned HOST_WIDE_INT) arg0
4454	     > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4455      break;
4456
4457    default:
4458      abort ();
4459    }
4460
4461  /* Clear the bits that don't belong in our mode, unless they and our sign
4462     bit are all one.  So we get either a reasonable negative value or a
4463     reasonable unsigned value for this mode.  */
4464  if (width < HOST_BITS_PER_WIDE_INT
4465      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4466	  != ((HOST_WIDE_INT) (-1) << (width - 1))))
4467    val &= ((HOST_WIDE_INT) 1 << width) - 1;
4468
4469  /* If this would be an entire word for the target, but is not for
4470     the host, then sign-extend on the host so that the number will look
4471     the same way on the host that it would on the target.
4472
4473     For example, when building a 64 bit alpha hosted 32 bit sparc
4474     targeted compiler, then we want the 32 bit unsigned value -1 to be
4475     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4476     The later confuses the sparc backend.  */
4477
4478  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4479      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4480    val |= ((HOST_WIDE_INT) (-1) << width);
4481
4482  return GEN_INT (val);
4483}
4484
4485/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4486   PLUS or MINUS.
4487
4488   Rather than test for specific case, we do this by a brute-force method
4489   and do all possible simplifications until no more changes occur.  Then
4490   we rebuild the operation.  */
4491
static rtx
simplify_plus_minus (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  /* OPS[i] holds each term of the flattened sum; NEGS[i] is nonzero when
     that term enters the sum negated.  At most 8 terms are handled.  */
  rtx ops[8];
  int negs[8];
  rtx result, tem;
  /* N_OPS counts terms currently held.  INPUT_OPS / INPUT_CONSTS record
     how many operands and CONSTs the original expression contributed, so
     we can tell at the end whether we actually simplified anything.  */
  int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
  int first = 1, negate = 0, changed;
  int i, j;

  bzero ((char *) ops, sizeof ops);

  /* Set up the two operands and then expand them until nothing has been
     changed.  If we run out of room in our array, give up; this should
     almost never happen.  */

  /* A MINUS at the top level means the second operand starts negated.  */
  ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);

  changed = 1;
  while (changed)
    {
      changed = 0;

      for (i = 0; i < n_ops; i++)
	switch (GET_CODE (ops[i]))
	  {
	  case PLUS:
	  case MINUS:
	    /* Refuse rather than overflow the fixed-size arrays.  */
	    if (n_ops == 7)
	      return 0;

	    /* Split the binary operation into two terms; a MINUS flips
	       the negation flag of its second operand.  */
	    ops[n_ops] = XEXP (ops[i], 1);
	    negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
	    ops[i] = XEXP (ops[i], 0);
	    input_ops++;
	    changed = 1;
	    break;

	  case NEG:
	    /* Strip the NEG and record it in the negation flag instead.  */
	    ops[i] = XEXP (ops[i], 0);
	    negs[i] = ! negs[i];
	    changed = 1;
	    break;

	  case CONST:
	    /* Unwrap CONST so its interior PLUS/MINUS can be expanded;
	       remember we had one for the improvement test below.  */
	    ops[i] = XEXP (ops[i], 0);
	    input_consts++;
	    changed = 1;
	    break;

	  case NOT:
	    /* ~a -> (-a - 1) */
	    if (n_ops != 7)
	      {
		ops[n_ops] = constm1_rtx;
		negs[n_ops++] = negs[i];
		ops[i] = XEXP (ops[i], 0);
		negs[i] = ! negs[i];
		changed = 1;
	      }
	    break;

	  case CONST_INT:
	    /* Fold a pending negation directly into the constant.  */
	    if (negs[i])
	      ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
	    break;

	  default:
	    break;
	  }
    }

  /* If we only have two operands, we can't do anything.  */
  if (n_ops <= 2)
    return 0;

  /* Now simplify each pair of operands until nothing changes.  The first
     time through just simplify constants against each other.  */

  changed = 1;
  while (changed)
    {
      /* On the first pass CHANGED stays set, forcing a second full pass
	 over all pairs once the constants have been combined.  */
      changed = first;

      for (i = 0; i < n_ops - 1; i++)
	for (j = i + 1; j < n_ops; j++)
	  if (ops[i] != 0 && ops[j] != 0
	      && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
	    {
	      /* Combine the pair with PLUS or MINUS according to their
		 relative negation flags; a combined term replaces OPS[i]
		 and OPS[j] is marked dead with 0.  */
	      rtx lhs = ops[i], rhs = ops[j];
	      enum rtx_code ncode = PLUS;

	      if (negs[i] && ! negs[j])
		lhs = ops[j], rhs = ops[i], ncode = MINUS;
	      else if (! negs[i] && negs[j])
		ncode = MINUS;

	      tem = simplify_binary_operation (ncode, mode, lhs, rhs);
	      if (tem)
		{
		  ops[i] = tem, ops[j] = 0;
		  negs[i] = negs[i] && negs[j];
		  /* Keep the term in canonical (non-NEG, non-negative
		     CONST_INT) form.  */
		  if (GET_CODE (tem) == NEG)
		    ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];

		  if (GET_CODE (ops[i]) == CONST_INT && negs[i])
		    ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
		  changed = 1;
		}
	    }

      first = 0;
    }

  /* Pack all the operands to the lower-numbered entries and give up if
     we didn't reduce the number of operands we had.  Make sure we
     count a CONST as two operands.  If we have the same number of
     operands, but have made more CONSTs than we had, this is also
     an improvement, so accept it.  */

  for (i = 0, j = 0; j < n_ops; j++)
    if (ops[j] != 0)
      {
	ops[i] = ops[j], negs[i++] = negs[j];
	if (GET_CODE (ops[j]) == CONST)
	  n_consts++;
      }

  if (i + n_consts > input_ops
      || (i + n_consts == input_ops && n_consts <= input_consts))
    return 0;

  n_ops = i;

  /* If we have a CONST_INT, put it last.  */
  for (i = 0; i < n_ops - 1; i++)
    if (GET_CODE (ops[i]) == CONST_INT)
      {
	tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
	j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
      }

  /* Put a non-negated operand first.  If there aren't any, make all
     operands positive and negate the whole thing later.  */
  for (i = 0; i < n_ops && negs[i]; i++)
    ;

  if (i == n_ops)
    {
      for (i = 0; i < n_ops; i++)
	negs[i] = 0;
      negate = 1;
    }
  else if (i != 0)
    {
      tem = ops[0], ops[0] = ops[i], ops[i] = tem;
      j = negs[0], negs[0] = negs[i], negs[i] = j;
    }

  /* Now make the result by performing the requested operations.  */
  result = ops[0];
  for (i = 1; i < n_ops; i++)
    result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);

  return negate ? gen_rtx_NEG (mode, result) : result;
}
4661
4662/* Make a binary operation by properly ordering the operands and
4663   seeing if the expression folds.  */
4664
4665static rtx
4666cse_gen_binary (code, mode, op0, op1)
4667     enum rtx_code code;
4668     enum machine_mode mode;
4669     rtx op0, op1;
4670{
4671  rtx tem;
4672
4673  /* Put complex operands first and constants second if commutative.  */
4674  if (GET_RTX_CLASS (code) == 'c'
4675      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4676	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4677	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4678	  || (GET_CODE (op0) == SUBREG
4679	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4680	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4681    tem = op0, op0 = op1, op1 = tem;
4682
4683  /* If this simplifies, do it.  */
4684  tem = simplify_binary_operation (code, mode, op0, op1);
4685
4686  if (tem)
4687    return tem;
4688
4689  /* Handle addition and subtraction of CONST_INT specially.  Otherwise,
4690     just form the operation.  */
4691
4692  if (code == PLUS && GET_CODE (op1) == CONST_INT
4693      && GET_MODE (op0) != VOIDmode)
4694    return plus_constant (op0, INTVAL (op1));
4695  else if (code == MINUS && GET_CODE (op1) == CONST_INT
4696	   && GET_MODE (op0) != VOIDmode)
4697    return plus_constant (op0, - INTVAL (op1));
4698  else
4699    return gen_rtx_fmt_ee (code, mode, op0, op1);
4700}
4701
/* Communication block for check_fold_consts, which runs under a float
   exception handler (via do_float_handler) and so takes a single
   opaque pointer rather than normal arguments.  */
struct cfc_args
{
  /* Input: the two floating-point CONST_DOUBLE rtx's to compare.  */
  rtx op0, op1;
  /* Output: nonzero when op0 == op1, op0 < op1, op1 < op0 respectively.  */
  int equal, op0lt, op1lt;
};
4709
4710static void
4711check_fold_consts (data)
4712  PTR data;
4713{
4714  struct cfc_args * args = (struct cfc_args *) data;
4715  REAL_VALUE_TYPE d0, d1;
4716
4717  REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4718  REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4719  args->equal = REAL_VALUES_EQUAL (d0, d1);
4720  args->op0lt = REAL_VALUES_LESS (d0, d1);
4721  args->op1lt = REAL_VALUES_LESS (d1, d0);
4722}
4723
4724/* Like simplify_binary_operation except used for relational operators.
4725   MODE is the mode of the operands, not that of the result.  If MODE
4726   is VOIDmode, both operands must also be VOIDmode and we compare the
4727   operands in "infinite precision".
4728
4729   If no simplification is possible, this function returns zero.  Otherwise,
4730   it returns either const_true_rtx or const0_rtx.  */
4731
4732rtx
4733simplify_relational_operation (code, mode, op0, op1)
4734     enum rtx_code code;
4735     enum machine_mode mode;
4736     rtx op0, op1;
4737{
4738  int equal, op0lt, op0ltu, op1lt, op1ltu;
4739  rtx tem;
4740
4741  /* If op0 is a compare, extract the comparison arguments from it.  */
4742  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4743    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4744
4745  /* We can't simplify MODE_CC values since we don't know what the
4746     actual comparison is.  */
4747  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4748#ifdef HAVE_cc0
4749      || op0 == cc0_rtx
4750#endif
4751      )
4752    return 0;
4753
4754  /* For integer comparisons of A and B maybe we can simplify A - B and can
4755     then simplify a comparison of that with zero.  If A and B are both either
4756     a register or a CONST_INT, this can't help; testing for these cases will
4757     prevent infinite recursion here and speed things up.
4758
4759     If CODE is an unsigned comparison, then we can never do this optimization,
4760     because it gives an incorrect result if the subtraction wraps around zero.
4761     ANSI C defines unsigned operations such that they never overflow, and
4762     thus such cases can not be ignored.  */
4763
4764  if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4765      && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4766	    && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4767      && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4768      && code != GTU && code != GEU && code != LTU && code != LEU)
4769    return simplify_relational_operation (signed_condition (code),
4770					  mode, tem, const0_rtx);
4771
4772  /* For non-IEEE floating-point, if the two operands are equal, we know the
4773     result.  */
4774  if (rtx_equal_p (op0, op1)
4775      && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4776	  || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4777    equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4778
4779  /* If the operands are floating-point constants, see if we can fold
4780     the result.  */
4781#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4782  else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4783	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4784    {
4785      struct cfc_args args;
4786
4787      /* Setup input for check_fold_consts() */
4788      args.op0 = op0;
4789      args.op1 = op1;
4790
4791      if (do_float_handler(check_fold_consts, (PTR) &args) == 0)
4792	/* We got an exception from check_fold_consts() */
4793	return 0;
4794
4795      /* Receive output from check_fold_consts() */
4796      equal = args.equal;
4797      op0lt = op0ltu = args.op0lt;
4798      op1lt = op1ltu = args.op1lt;
4799    }
4800#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4801
4802  /* Otherwise, see if the operands are both integers.  */
4803  else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4804	   && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4805	   && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4806    {
4807      int width = GET_MODE_BITSIZE (mode);
4808      HOST_WIDE_INT l0s, h0s, l1s, h1s;
4809      unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4810
4811      /* Get the two words comprising each integer constant.  */
4812      if (GET_CODE (op0) == CONST_DOUBLE)
4813	{
4814	  l0u = l0s = CONST_DOUBLE_LOW (op0);
4815	  h0u = h0s = CONST_DOUBLE_HIGH (op0);
4816	}
4817      else
4818	{
4819	  l0u = l0s = INTVAL (op0);
4820	  h0u = h0s = l0s < 0 ? -1 : 0;
4821	}
4822
4823      if (GET_CODE (op1) == CONST_DOUBLE)
4824	{
4825	  l1u = l1s = CONST_DOUBLE_LOW (op1);
4826	  h1u = h1s = CONST_DOUBLE_HIGH (op1);
4827	}
4828      else
4829	{
4830	  l1u = l1s = INTVAL (op1);
4831	  h1u = h1s = l1s < 0 ? -1 : 0;
4832	}
4833
4834      /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4835	 we have to sign or zero-extend the values.  */
4836      if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4837	h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4838
4839      if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4840	{
4841	  l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4842	  l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4843
4844	  if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4845	    l0s |= ((HOST_WIDE_INT) (-1) << width);
4846
4847	  if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4848	    l1s |= ((HOST_WIDE_INT) (-1) << width);
4849	}
4850
4851      equal = (h0u == h1u && l0u == l1u);
4852      op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4853      op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4854      op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4855      op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4856    }
4857
4858  /* Otherwise, there are some code-specific tests we can make.  */
4859  else
4860    {
4861      switch (code)
4862	{
4863	case EQ:
4864	  /* References to the frame plus a constant or labels cannot
4865	     be zero, but a SYMBOL_REF can due to #pragma weak.  */
4866	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4867	       || GET_CODE (op0) == LABEL_REF)
4868#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4869	      /* On some machines, the ap reg can be 0 sometimes.  */
4870	      && op0 != arg_pointer_rtx
4871#endif
4872		)
4873	    return const0_rtx;
4874	  break;
4875
4876	case NE:
4877	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4878	       || GET_CODE (op0) == LABEL_REF)
4879#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4880	      && op0 != arg_pointer_rtx
4881#endif
4882	      )
4883	    return const_true_rtx;
4884	  break;
4885
4886	case GEU:
4887	  /* Unsigned values are never negative.  */
4888	  if (op1 == const0_rtx)
4889	    return const_true_rtx;
4890	  break;
4891
4892	case LTU:
4893	  if (op1 == const0_rtx)
4894	    return const0_rtx;
4895	  break;
4896
4897	case LEU:
4898	  /* Unsigned values are never greater than the largest
4899	     unsigned value.  */
4900	  if (GET_CODE (op1) == CONST_INT
4901	      && INTVAL (op1) == GET_MODE_MASK (mode)
4902	    && INTEGRAL_MODE_P (mode))
4903	  return const_true_rtx;
4904	  break;
4905
4906	case GTU:
4907	  if (GET_CODE (op1) == CONST_INT
4908	      && INTVAL (op1) == GET_MODE_MASK (mode)
4909	      && INTEGRAL_MODE_P (mode))
4910	    return const0_rtx;
4911	  break;
4912
4913	default:
4914	  break;
4915	}
4916
4917      return 0;
4918    }
4919
4920  /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4921     as appropriate.  */
4922  switch (code)
4923    {
4924    case EQ:
4925      return equal ? const_true_rtx : const0_rtx;
4926    case NE:
4927      return ! equal ? const_true_rtx : const0_rtx;
4928    case LT:
4929      return op0lt ? const_true_rtx : const0_rtx;
4930    case GT:
4931      return op1lt ? const_true_rtx : const0_rtx;
4932    case LTU:
4933      return op0ltu ? const_true_rtx : const0_rtx;
4934    case GTU:
4935      return op1ltu ? const_true_rtx : const0_rtx;
4936    case LE:
4937      return equal || op0lt ? const_true_rtx : const0_rtx;
4938    case GE:
4939      return equal || op1lt ? const_true_rtx : const0_rtx;
4940    case LEU:
4941      return equal || op0ltu ? const_true_rtx : const0_rtx;
4942    case GEU:
4943      return equal || op1ltu ? const_true_rtx : const0_rtx;
4944    default:
4945      abort ();
4946    }
4947}
4948
4949/* Simplify CODE, an operation with result mode MODE and three operands,
4950   OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
4951   a constant.  Return 0 if no simplifications is possible.  */
4952
4953rtx
4954simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4955     enum rtx_code code;
4956     enum machine_mode mode, op0_mode;
4957     rtx op0, op1, op2;
4958{
4959  int width = GET_MODE_BITSIZE (mode);
4960
4961  /* VOIDmode means "infinite" precision.  */
4962  if (width == 0)
4963    width = HOST_BITS_PER_WIDE_INT;
4964
4965  switch (code)
4966    {
4967    case SIGN_EXTRACT:
4968    case ZERO_EXTRACT:
4969      if (GET_CODE (op0) == CONST_INT
4970	  && GET_CODE (op1) == CONST_INT
4971	  && GET_CODE (op2) == CONST_INT
4972	  && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4973	  && width <= HOST_BITS_PER_WIDE_INT)
4974	{
4975	  /* Extracting a bit-field from a constant */
4976	  HOST_WIDE_INT val = INTVAL (op0);
4977
4978	  if (BITS_BIG_ENDIAN)
4979	    val >>= (GET_MODE_BITSIZE (op0_mode)
4980		     - INTVAL (op2) - INTVAL (op1));
4981	  else
4982	    val >>= INTVAL (op2);
4983
4984	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4985	    {
4986	      /* First zero-extend.  */
4987	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4988	      /* If desired, propagate sign bit.  */
4989	      if (code == SIGN_EXTRACT
4990		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4991		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4992	    }
4993
4994	  /* Clear the bits that don't belong in our mode,
4995	     unless they and our sign bit are all one.
4996	     So we get either a reasonable negative value or a reasonable
4997	     unsigned value for this mode.  */
4998	  if (width < HOST_BITS_PER_WIDE_INT
4999	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
5000		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
5001	    val &= ((HOST_WIDE_INT) 1 << width) - 1;
5002
5003	  return GEN_INT (val);
5004	}
5005      break;
5006
5007    case IF_THEN_ELSE:
5008      if (GET_CODE (op0) == CONST_INT)
5009	return op0 != const0_rtx ? op1 : op2;
5010
5011      /* Convert a == b ? b : a to "a".  */
5012      if (GET_CODE (op0) == NE && ! side_effects_p (op0)
5013	  && rtx_equal_p (XEXP (op0, 0), op1)
5014	  && rtx_equal_p (XEXP (op0, 1), op2))
5015	return op1;
5016      else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
5017	  && rtx_equal_p (XEXP (op0, 1), op1)
5018	  && rtx_equal_p (XEXP (op0, 0), op2))
5019	return op2;
5020      else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
5021	{
5022	  rtx temp;
5023	  temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
5024						XEXP (op0, 0), XEXP (op0, 1));
5025	  /* See if any simplifications were possible.  */
5026	  if (temp == const0_rtx)
5027	    return op2;
5028	  else if (temp == const1_rtx)
5029	    return op1;
5030	}
5031      break;
5032
5033    default:
5034      abort ();
5035    }
5036
5037  return 0;
5038}
5039
5040/* If X is a nontrivial arithmetic operation on an argument
5041   for which a constant value can be determined, return
5042   the result of operating on that value, as a constant.
5043   Otherwise, return X, possibly with one or more operands
5044   modified by recursive calls to this function.
5045
5046   If X is a register whose contents are known, we do NOT
5047   return those contents here.  equiv_constant is called to
5048   perform that task.
5049
5050   INSN is the insn that we may be modifying.  If it is 0, make a copy
5051   of X before modifying it.  */
5052
5053static rtx
5054fold_rtx (x, insn)
5055     rtx x;
5056     rtx insn;
5057{
5058  register enum rtx_code code;
5059  register enum machine_mode mode;
5060  register char *fmt;
5061  register int i;
5062  rtx new = 0;
5063  int copied = 0;
5064  int must_swap = 0;
5065
5066  /* Folded equivalents of first two operands of X.  */
5067  rtx folded_arg0;
5068  rtx folded_arg1;
5069
5070  /* Constant equivalents of first three operands of X;
5071     0 when no such equivalent is known.  */
5072  rtx const_arg0;
5073  rtx const_arg1;
5074  rtx const_arg2;
5075
5076  /* The mode of the first operand of X.  We need this for sign and zero
5077     extends.  */
5078  enum machine_mode mode_arg0;
5079
5080  if (x == 0)
5081    return x;
5082
5083  mode = GET_MODE (x);
5084  code = GET_CODE (x);
5085  switch (code)
5086    {
5087    case CONST:
5088    case CONST_INT:
5089    case CONST_DOUBLE:
5090    case SYMBOL_REF:
5091    case LABEL_REF:
5092    case REG:
5093      /* No use simplifying an EXPR_LIST
5094	 since they are used only for lists of args
5095	 in a function call's REG_EQUAL note.  */
5096    case EXPR_LIST:
5097      /* Changing anything inside an ADDRESSOF is incorrect; we don't
5098	 want to (e.g.,) make (addressof (const_int 0)) just because
5099	 the location is known to be zero.  */
5100    case ADDRESSOF:
5101      return x;
5102
5103#ifdef HAVE_cc0
5104    case CC0:
5105      return prev_insn_cc0;
5106#endif
5107
5108    case PC:
5109      /* If the next insn is a CODE_LABEL followed by a jump table,
5110	 PC's value is a LABEL_REF pointing to that label.  That
5111	 lets us fold switch statements on the Vax.  */
5112      if (insn && GET_CODE (insn) == JUMP_INSN)
5113	{
5114	  rtx next = next_nonnote_insn (insn);
5115
5116	  if (next && GET_CODE (next) == CODE_LABEL
5117	      && NEXT_INSN (next) != 0
5118	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5119	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5120		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
5121	    return gen_rtx_LABEL_REF (Pmode, next);
5122	}
5123      break;
5124
5125    case SUBREG:
5126      /* See if we previously assigned a constant value to this SUBREG.  */
5127      if ((new = lookup_as_function (x, CONST_INT)) != 0
5128	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
5129	return new;
5130
5131      /* If this is a paradoxical SUBREG, we have no idea what value the
5132	 extra bits would have.  However, if the operand is equivalent
5133	 to a SUBREG whose operand is the same as our mode, and all the
5134	 modes are within a word, we can just use the inner operand
5135	 because these SUBREGs just say how to treat the register.
5136
5137	 Similarly if we find an integer constant.  */
5138
5139      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5140	{
5141	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5142	  struct table_elt *elt;
5143
5144	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5145	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5146	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5147				imode)) != 0)
5148	    for (elt = elt->first_same_value;
5149		 elt; elt = elt->next_same_value)
5150	      {
5151		if (CONSTANT_P (elt->exp)
5152		    && GET_MODE (elt->exp) == VOIDmode)
5153		  return elt->exp;
5154
5155		if (GET_CODE (elt->exp) == SUBREG
5156		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
5157		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5158		  return copy_rtx (SUBREG_REG (elt->exp));
5159	    }
5160
5161	  return x;
5162	}
5163
5164      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
5165	 We might be able to if the SUBREG is extracting a single word in an
5166	 integral mode or extracting the low part.  */
5167
5168      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5169      const_arg0 = equiv_constant (folded_arg0);
5170      if (const_arg0)
5171	folded_arg0 = const_arg0;
5172
5173      if (folded_arg0 != SUBREG_REG (x))
5174	{
5175	  new = 0;
5176
5177	  if (GET_MODE_CLASS (mode) == MODE_INT
5178	      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5179	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5180	    new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5181				   GET_MODE (SUBREG_REG (x)));
5182	  if (new == 0 && subreg_lowpart_p (x))
5183	    new = gen_lowpart_if_possible (mode, folded_arg0);
5184	  if (new)
5185	    return new;
5186	}
5187
5188      /* If this is a narrowing SUBREG and our operand is a REG, see if
5189	 we can find an equivalence for REG that is an arithmetic operation
5190	 in a wider mode where both operands are paradoxical SUBREGs
5191	 from objects of our result mode.  In that case, we couldn't report
5192	 an equivalent value for that operation, since we don't know what the
5193	 extra bits will be.  But we can find an equivalence for this SUBREG
5194	 by folding that operation is the narrow mode.  This allows us to
5195	 fold arithmetic in narrow modes when the machine only supports
5196	 word-sized arithmetic.
5197
5198	 Also look for a case where we have a SUBREG whose operand is the
5199	 same as our result.  If both modes are smaller than a word, we
5200	 are simply interpreting a register in different modes and we
5201	 can use the inner value.  */
5202
5203      if (GET_CODE (folded_arg0) == REG
5204	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5205	  && subreg_lowpart_p (x))
5206	{
5207	  struct table_elt *elt;
5208
5209	  /* We can use HASH here since we know that canon_hash won't be
5210	     called.  */
5211	  elt = lookup (folded_arg0,
5212			HASH (folded_arg0, GET_MODE (folded_arg0)),
5213			GET_MODE (folded_arg0));
5214
5215	  if (elt)
5216	    elt = elt->first_same_value;
5217
5218	  for (; elt; elt = elt->next_same_value)
5219	    {
5220	      enum rtx_code eltcode = GET_CODE (elt->exp);
5221
5222	      /* Just check for unary and binary operations.  */
5223	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5224		  && GET_CODE (elt->exp) != SIGN_EXTEND
5225		  && GET_CODE (elt->exp) != ZERO_EXTEND
5226		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5227		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5228		{
5229		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5230
5231		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5232		    op0 = fold_rtx (op0, NULL_RTX);
5233
5234		  op0 = equiv_constant (op0);
5235		  if (op0)
5236		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5237						    op0, mode);
5238		}
5239	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5240			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
5241		       && eltcode != DIV && eltcode != MOD
5242		       && eltcode != UDIV && eltcode != UMOD
5243		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5244		       && eltcode != ROTATE && eltcode != ROTATERT
5245		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5246			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5247				== mode))
5248			   || CONSTANT_P (XEXP (elt->exp, 0)))
5249		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5250			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5251				== mode))
5252			   || CONSTANT_P (XEXP (elt->exp, 1))))
5253		{
5254		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5255		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5256
5257		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5258		    op0 = fold_rtx (op0, NULL_RTX);
5259
5260		  if (op0)
5261		    op0 = equiv_constant (op0);
5262
5263		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5264		    op1 = fold_rtx (op1, NULL_RTX);
5265
5266		  if (op1)
5267		    op1 = equiv_constant (op1);
5268
5269		  /* If we are looking for the low SImode part of
5270		     (ashift:DI c (const_int 32)), it doesn't work
5271		     to compute that in SImode, because a 32-bit shift
5272		     in SImode is unpredictable.  We know the value is 0.  */
5273		  if (op0 && op1
5274		      && GET_CODE (elt->exp) == ASHIFT
5275		      && GET_CODE (op1) == CONST_INT
5276		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5277		    {
5278		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5279
5280			/* If the count fits in the inner mode's width,
5281			   but exceeds the outer mode's width,
5282			   the value will get truncated to 0
5283			   by the subreg.  */
5284			new = const0_rtx;
5285		      else
5286			/* If the count exceeds even the inner mode's width,
5287			   don't fold this expression.  */
5288			new = 0;
5289		    }
5290		  else if (op0 && op1)
5291		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5292						     op0, op1);
5293		}
5294
5295	      else if (GET_CODE (elt->exp) == SUBREG
5296		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
5297		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5298			   <= UNITS_PER_WORD)
5299		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5300		new = copy_rtx (SUBREG_REG (elt->exp));
5301
5302	      if (new)
5303		return new;
5304	    }
5305	}
5306
5307      return x;
5308
5309    case NOT:
5310    case NEG:
5311      /* If we have (NOT Y), see if Y is known to be (NOT Z).
5312	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
5313      new = lookup_as_function (XEXP (x, 0), code);
5314      if (new)
5315	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5316      break;
5317
5318    case MEM:
5319      /* If we are not actually processing an insn, don't try to find the
5320	 best address.  Not only don't we care, but we could modify the
5321	 MEM in an invalid way since we have no insn to validate against.  */
5322      if (insn != 0)
5323	find_best_addr (insn, &XEXP (x, 0));
5324
5325      {
5326	/* Even if we don't fold in the insn itself,
5327	   we can safely do so here, in hopes of getting a constant.  */
5328	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5329	rtx base = 0;
5330	HOST_WIDE_INT offset = 0;
5331
5332	if (GET_CODE (addr) == REG
5333	    && REGNO_QTY_VALID_P (REGNO (addr))
5334	    && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5335	    && qty_const[REG_QTY (REGNO (addr))] != 0)
5336	  addr = qty_const[REG_QTY (REGNO (addr))];
5337
5338	/* If address is constant, split it into a base and integer offset.  */
5339	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5340	  base = addr;
5341	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5342		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5343	  {
5344	    base = XEXP (XEXP (addr, 0), 0);
5345	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5346	  }
5347	else if (GET_CODE (addr) == LO_SUM
5348		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5349	  base = XEXP (addr, 1);
5350	else if (GET_CODE (addr) == ADDRESSOF)
5351	  return change_address (x, VOIDmode, addr);
5352
5353	/* If this is a constant pool reference, we can fold it into its
5354	   constant to allow better value tracking.  */
5355	if (base && GET_CODE (base) == SYMBOL_REF
5356	    && CONSTANT_POOL_ADDRESS_P (base))
5357	  {
5358	    rtx constant = get_pool_constant (base);
5359	    enum machine_mode const_mode = get_pool_mode (base);
5360	    rtx new;
5361
5362	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5363	      constant_pool_entries_cost = COST (constant);
5364
5365	    /* If we are loading the full constant, we have an equivalence.  */
5366	    if (offset == 0 && mode == const_mode)
5367	      return constant;
5368
5369	    /* If this actually isn't a constant (weird!), we can't do
5370	       anything.  Otherwise, handle the two most common cases:
5371	       extracting a word from a multi-word constant, and extracting
5372	       the low-order bits.  Other cases don't seem common enough to
5373	       worry about.  */
5374	    if (! CONSTANT_P (constant))
5375	      return x;
5376
5377	    if (GET_MODE_CLASS (mode) == MODE_INT
5378		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
5379		&& offset % UNITS_PER_WORD == 0
5380		&& (new = operand_subword (constant,
5381					   offset / UNITS_PER_WORD,
5382					   0, const_mode)) != 0)
5383	      return new;
5384
5385	    if (((BYTES_BIG_ENDIAN
5386		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5387		 || (! BYTES_BIG_ENDIAN && offset == 0))
5388		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
5389	      return new;
5390	  }
5391
5392	/* If this is a reference to a label at a known position in a jump
5393	   table, we also know its value.  */
5394	if (base && GET_CODE (base) == LABEL_REF)
5395	  {
5396	    rtx label = XEXP (base, 0);
5397	    rtx table_insn = NEXT_INSN (label);
5398
5399	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5400		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5401	      {
5402		rtx table = PATTERN (table_insn);
5403
5404		if (offset >= 0
5405		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5406			< XVECLEN (table, 0)))
5407		  return XVECEXP (table, 0,
5408				  offset / GET_MODE_SIZE (GET_MODE (table)));
5409	      }
5410	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5411		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5412	      {
5413		rtx table = PATTERN (table_insn);
5414
5415		if (offset >= 0
5416		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5417			< XVECLEN (table, 1)))
5418		  {
5419		    offset /= GET_MODE_SIZE (GET_MODE (table));
5420		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5421					 XEXP (table, 0));
5422
5423		    if (GET_MODE (table) != Pmode)
5424		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5425
5426		    /* Indicate this is a constant.  This isn't a
5427		       valid form of CONST, but it will only be used
5428		       to fold the next insns and then discarded, so
5429		       it should be safe.
5430
5431		       Note this expression must be explicitly discarded,
5432		       by cse_insn, else it may end up in a REG_EQUAL note
5433		       and "escape" to cause problems elsewhere.  */
5434		    return gen_rtx_CONST (GET_MODE (new), new);
5435		  }
5436	      }
5437	  }
5438
5439	return x;
5440      }
5441
5442    case ASM_OPERANDS:
5443      for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5444	validate_change (insn, &XVECEXP (x, 3, i),
5445			 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5446      break;
5447
5448    default:
5449      break;
5450    }
5451
5452  const_arg0 = 0;
5453  const_arg1 = 0;
5454  const_arg2 = 0;
5455  mode_arg0 = VOIDmode;
5456
5457  /* Try folding our operands.
5458     Then see which ones have constant values known.  */
5459
5460  fmt = GET_RTX_FORMAT (code);
5461  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5462    if (fmt[i] == 'e')
5463      {
5464	rtx arg = XEXP (x, i);
5465	rtx folded_arg = arg, const_arg = 0;
5466	enum machine_mode mode_arg = GET_MODE (arg);
5467	rtx cheap_arg, expensive_arg;
5468	rtx replacements[2];
5469	int j;
5470
5471	/* Most arguments are cheap, so handle them specially.  */
5472	switch (GET_CODE (arg))
5473	  {
5474	  case REG:
5475	    /* This is the same as calling equiv_constant; it is duplicated
5476	       here for speed.  */
5477	    if (REGNO_QTY_VALID_P (REGNO (arg))
5478		&& qty_const[REG_QTY (REGNO (arg))] != 0
5479		&& GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5480		&& GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
5481	      const_arg
5482		= gen_lowpart_if_possible (GET_MODE (arg),
5483					   qty_const[REG_QTY (REGNO (arg))]);
5484	    break;
5485
5486	  case CONST:
5487	  case CONST_INT:
5488	  case SYMBOL_REF:
5489	  case LABEL_REF:
5490	  case CONST_DOUBLE:
5491	    const_arg = arg;
5492	    break;
5493
5494#ifdef HAVE_cc0
5495	  case CC0:
5496	    folded_arg = prev_insn_cc0;
5497	    mode_arg = prev_insn_cc0_mode;
5498	    const_arg = equiv_constant (folded_arg);
5499	    break;
5500#endif
5501
5502	  default:
5503	    folded_arg = fold_rtx (arg, insn);
5504	    const_arg = equiv_constant (folded_arg);
5505	  }
5506
5507	/* For the first three operands, see if the operand
5508	   is constant or equivalent to a constant.  */
5509	switch (i)
5510	  {
5511	  case 0:
5512	    folded_arg0 = folded_arg;
5513	    const_arg0 = const_arg;
5514	    mode_arg0 = mode_arg;
5515	    break;
5516	  case 1:
5517	    folded_arg1 = folded_arg;
5518	    const_arg1 = const_arg;
5519	    break;
5520	  case 2:
5521	    const_arg2 = const_arg;
5522	    break;
5523	  }
5524
5525	/* Pick the least expensive of the folded argument and an
5526	   equivalent constant argument.  */
5527	if (const_arg == 0 || const_arg == folded_arg
5528	    || COST (const_arg) > COST (folded_arg))
5529	  cheap_arg = folded_arg, expensive_arg = const_arg;
5530	else
5531	  cheap_arg = const_arg, expensive_arg = folded_arg;
5532
5533	/* Try to replace the operand with the cheapest of the two
5534	   possibilities.  If it doesn't work and this is either of the first
5535	   two operands of a commutative operation, try swapping them.
5536	   If THAT fails, try the more expensive, provided it is cheaper
5537	   than what is already there.  */
5538
5539	if (cheap_arg == XEXP (x, i))
5540	  continue;
5541
5542	if (insn == 0 && ! copied)
5543	  {
5544	    x = copy_rtx (x);
5545	    copied = 1;
5546	  }
5547
5548	replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5549	for (j = 0;
5550	     j < 2 && replacements[j]
5551	     && COST (replacements[j]) < COST (XEXP (x, i));
5552	     j++)
5553	  {
5554	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5555	      break;
5556
5557	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5558	      {
5559		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5560		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5561
5562		if (apply_change_group ())
5563		  {
5564		    /* Swap them back to be invalid so that this loop can
5565		       continue and flag them to be swapped back later.  */
5566		    rtx tem;
5567
5568		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5569				       XEXP (x, 1) = tem;
5570		    must_swap = 1;
5571		    break;
5572		  }
5573	      }
5574	  }
5575      }
5576
5577    else
5578      {
5579	if (fmt[i] == 'E')
5580	  /* Don't try to fold inside of a vector of expressions.
5581	     Doing nothing is harmless.  */
5582	  {;}
5583      }
5584
5585  /* If a commutative operation, place a constant integer as the second
5586     operand unless the first operand is also a constant integer.  Otherwise,
5587     place any constant second unless the first operand is also a constant.  */
5588
5589  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5590    {
5591      if (must_swap || (const_arg0
5592	  		&& (const_arg1 == 0
5593	      		    || (GET_CODE (const_arg0) == CONST_INT
5594			        && GET_CODE (const_arg1) != CONST_INT))))
5595	{
5596	  register rtx tem = XEXP (x, 0);
5597
5598	  if (insn == 0 && ! copied)
5599	    {
5600	      x = copy_rtx (x);
5601	      copied = 1;
5602	    }
5603
5604	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5605	  validate_change (insn, &XEXP (x, 1), tem, 1);
5606	  if (apply_change_group ())
5607	    {
5608	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5609	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5610	    }
5611	}
5612    }
5613
5614  /* If X is an arithmetic operation, see if we can simplify it.  */
5615
5616  switch (GET_RTX_CLASS (code))
5617    {
5618    case '1':
5619      {
5620	int is_const = 0;
5621
5622	/* We can't simplify extension ops unless we know the
5623	   original mode.  */
5624	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5625	    && mode_arg0 == VOIDmode)
5626	  break;
5627
5628	/* If we had a CONST, strip it off and put it back later if we
5629	   fold.  */
5630	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5631	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5632
5633	new = simplify_unary_operation (code, mode,
5634					const_arg0 ? const_arg0 : folded_arg0,
5635					mode_arg0);
5636	if (new != 0 && is_const)
5637	  new = gen_rtx_CONST (mode, new);
5638      }
5639      break;
5640
5641    case '<':
5642      /* See what items are actually being compared and set FOLDED_ARG[01]
5643	 to those values and CODE to the actual comparison code.  If any are
5644	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
5645	 do anything if both operands are already known to be constant.  */
5646
5647      if (const_arg0 == 0 || const_arg1 == 0)
5648	{
5649	  struct table_elt *p0, *p1;
5650	  rtx true = const_true_rtx, false = const0_rtx;
5651	  enum machine_mode mode_arg1;
5652
5653#ifdef FLOAT_STORE_FLAG_VALUE
5654	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5655	    {
5656	      true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5657						   mode);
5658	      false = CONST0_RTX (mode);
5659	    }
5660#endif
5661
5662	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5663				       &mode_arg0, &mode_arg1);
5664	  const_arg0 = equiv_constant (folded_arg0);
5665	  const_arg1 = equiv_constant (folded_arg1);
5666
5667	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5668	     what kinds of things are being compared, so we can't do
5669	     anything with this comparison.  */
5670
5671	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5672	    break;
5673
5674	  /* If we do not now have two constants being compared, see
5675	     if we can nevertheless deduce some things about the
5676	     comparison.  */
5677	  if (const_arg0 == 0 || const_arg1 == 0)
5678	    {
5679	      /* Is FOLDED_ARG0 frame-pointer plus a constant?  Or
5680		 non-explicit constant?  These aren't zero, but we
5681		 don't know their sign.  */
5682	      if (const_arg1 == const0_rtx
5683		  && (NONZERO_BASE_PLUS_P (folded_arg0)
5684#if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5685	  come out as 0.  */
5686		      || GET_CODE (folded_arg0) == SYMBOL_REF
5687#endif
5688		      || GET_CODE (folded_arg0) == LABEL_REF
5689		      || GET_CODE (folded_arg0) == CONST))
5690		{
5691		  if (code == EQ)
5692		    return false;
5693		  else if (code == NE)
5694		    return true;
5695		}
5696
5697	      /* See if the two operands are the same.  We don't do this
5698		 for IEEE floating-point since we can't assume x == x
5699		 since x might be a NaN.  */
5700
5701	      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5702		   || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5703		  && (folded_arg0 == folded_arg1
5704		      || (GET_CODE (folded_arg0) == REG
5705			  && GET_CODE (folded_arg1) == REG
5706			  && (REG_QTY (REGNO (folded_arg0))
5707			      == REG_QTY (REGNO (folded_arg1))))
5708		      || ((p0 = lookup (folded_arg0,
5709					(safe_hash (folded_arg0, mode_arg0)
5710					 % NBUCKETS), mode_arg0))
5711			  && (p1 = lookup (folded_arg1,
5712					   (safe_hash (folded_arg1, mode_arg0)
5713					    % NBUCKETS), mode_arg0))
5714			  && p0->first_same_value == p1->first_same_value)))
5715		return ((code == EQ || code == LE || code == GE
5716			 || code == LEU || code == GEU)
5717			? true : false);
5718
5719	      /* If FOLDED_ARG0 is a register, see if the comparison we are
5720		 doing now is either the same as we did before or the reverse
5721		 (we only check the reverse if not floating-point).  */
5722	      else if (GET_CODE (folded_arg0) == REG)
5723		{
5724		  int qty = REG_QTY (REGNO (folded_arg0));
5725
5726		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5727		      && (comparison_dominates_p (qty_comparison_code[qty], code)
5728			  || (comparison_dominates_p (qty_comparison_code[qty],
5729						      reverse_condition (code))
5730			      && ! FLOAT_MODE_P (mode_arg0)))
5731		      && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5732			  || (const_arg1
5733			      && rtx_equal_p (qty_comparison_const[qty],
5734					      const_arg1))
5735			  || (GET_CODE (folded_arg1) == REG
5736			      && (REG_QTY (REGNO (folded_arg1))
5737				  == qty_comparison_qty[qty]))))
5738		    return (comparison_dominates_p (qty_comparison_code[qty],
5739						    code)
5740			    ? true : false);
5741		}
5742	    }
5743	}
5744
5745      /* If we are comparing against zero, see if the first operand is
5746	 equivalent to an IOR with a constant.  If so, we may be able to
5747	 determine the result of this comparison.  */
5748
5749      if (const_arg1 == const0_rtx)
5750	{
5751	  rtx y = lookup_as_function (folded_arg0, IOR);
5752	  rtx inner_const;
5753
5754	  if (y != 0
5755	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5756	      && GET_CODE (inner_const) == CONST_INT
5757	      && INTVAL (inner_const) != 0)
5758	    {
5759	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5760	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5761			      && (INTVAL (inner_const)
5762				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5763	      rtx true = const_true_rtx, false = const0_rtx;
5764
5765#ifdef FLOAT_STORE_FLAG_VALUE
5766	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5767		{
5768		  true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5769						       mode);
5770		  false = CONST0_RTX (mode);
5771		}
5772#endif
5773
5774	      switch (code)
5775		{
5776		case EQ:
5777		  return false;
5778		case NE:
5779		  return true;
5780		case LT:  case LE:
5781		  if (has_sign)
5782		    return true;
5783		  break;
5784		case GT:  case GE:
5785		  if (has_sign)
5786		    return false;
5787		  break;
5788		default:
5789		  break;
5790		}
5791	    }
5792	}
5793
5794      new = simplify_relational_operation (code, mode_arg0,
5795					   const_arg0 ? const_arg0 : folded_arg0,
5796					   const_arg1 ? const_arg1 : folded_arg1);
5797#ifdef FLOAT_STORE_FLAG_VALUE
5798      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5799	new = ((new == const0_rtx) ? CONST0_RTX (mode)
5800	       : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5801#endif
5802      break;
5803
5804    case '2':
5805    case 'c':
5806      switch (code)
5807	{
5808	case PLUS:
5809	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
5810	     with that LABEL_REF as its second operand.  If so, the result is
5811	     the first operand of that MINUS.  This handles switches with an
5812	     ADDR_DIFF_VEC table.  */
5813	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5814	    {
5815	      rtx y
5816		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
5817		  : lookup_as_function (folded_arg0, MINUS);
5818
5819	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5820		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5821		return XEXP (y, 0);
5822
5823	      /* Now try for a CONST of a MINUS like the above.  */
5824	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5825			: lookup_as_function (folded_arg0, CONST))) != 0
5826		  && GET_CODE (XEXP (y, 0)) == MINUS
5827		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5828		  && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5829		return XEXP (XEXP (y, 0), 0);
5830	    }
5831
5832	  /* Likewise if the operands are in the other order.  */
5833	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5834	    {
5835	      rtx y
5836		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
5837		  : lookup_as_function (folded_arg1, MINUS);
5838
5839	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5840		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5841		return XEXP (y, 0);
5842
5843	      /* Now try for a CONST of a MINUS like the above.  */
5844	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5845			: lookup_as_function (folded_arg1, CONST))) != 0
5846		  && GET_CODE (XEXP (y, 0)) == MINUS
5847		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5848		  && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5849		return XEXP (XEXP (y, 0), 0);
5850	    }
5851
5852	  /* If second operand is a register equivalent to a negative
5853	     CONST_INT, see if we can find a register equivalent to the
5854	     positive constant.  Make a MINUS if so.  Don't do this for
5855	     a non-negative constant since we might then alternate between
	     choosing positive and negative constants.  Having the positive
5857	     constant previously-used is the more common case.  Be sure
5858	     the resulting constant is non-negative; if const_arg1 were
5859	     the smallest negative number this would overflow: depending
5860	     on the mode, this would either just be the same value (and
5861	     hence not save anything) or be incorrect.  */
5862	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5863	      && INTVAL (const_arg1) < 0
5864	      && - INTVAL (const_arg1) >= 0
5865	      && GET_CODE (folded_arg1) == REG)
5866	    {
5867	      rtx new_const = GEN_INT (- INTVAL (const_arg1));
5868	      struct table_elt *p
5869		= lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5870			  mode);
5871
5872	      if (p)
5873		for (p = p->first_same_value; p; p = p->next_same_value)
5874		  if (GET_CODE (p->exp) == REG)
5875		    return cse_gen_binary (MINUS, mode, folded_arg0,
5876					   canon_reg (p->exp, NULL_RTX));
5877	    }
5878	  goto from_plus;
5879
5880	case MINUS:
5881	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5882	     If so, produce (PLUS Z C2-C).  */
5883	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5884	    {
5885	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5886	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5887		return fold_rtx (plus_constant (copy_rtx (y),
5888						-INTVAL (const_arg1)),
5889				 NULL_RTX);
5890	    }
5891
5892	  /* ... fall through ...  */
5893
5894	from_plus:
5895	case SMIN:    case SMAX:      case UMIN:    case UMAX:
5896	case IOR:     case AND:       case XOR:
5897	case MULT:    case DIV:       case UDIV:
5898	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
5899	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5900	     is known to be of similar form, we may be able to replace the
5901	     operation with a combined operation.  This may eliminate the
5902	     intermediate operation if every use is simplified in this way.
5903	     Note that the similar optimization done by combine.c only works
5904	     if the intermediate operation's result has only one reference.  */
5905
5906	  if (GET_CODE (folded_arg0) == REG
5907	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5908	    {
5909	      int is_shift
5910		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5911	      rtx y = lookup_as_function (folded_arg0, code);
5912	      rtx inner_const;
5913	      enum rtx_code associate_code;
5914	      rtx new_const;
5915
5916	      if (y == 0
5917		  || 0 == (inner_const
5918			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5919		  || GET_CODE (inner_const) != CONST_INT
5920		  /* If we have compiled a statement like
5921		     "if (x == (x & mask1))", and now are looking at
5922		     "x & mask2", we will have a case where the first operand
5923		     of Y is the same as our first operand.  Unless we detect
5924		     this case, an infinite loop will result.  */
5925		  || XEXP (y, 0) == folded_arg0)
5926		break;
5927
5928	      /* Don't associate these operations if they are a PLUS with the
5929		 same constant and it is a power of two.  These might be doable
5930		 with a pre- or post-increment.  Similarly for two subtracts of
5931		 identical powers of two with post decrement.  */
5932
5933	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5934		  && ((HAVE_PRE_INCREMENT
5935			  && exact_log2 (INTVAL (const_arg1)) >= 0)
5936		      || (HAVE_POST_INCREMENT
5937			  && exact_log2 (INTVAL (const_arg1)) >= 0)
5938		      || (HAVE_PRE_DECREMENT
5939			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
5940		      || (HAVE_POST_DECREMENT
5941			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
5942		break;
5943
5944	      /* Compute the code used to compose the constants.  For example,
5945		 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT.  */
5946
5947	      associate_code
5948		= (code == MULT || code == DIV || code == UDIV ? MULT
5949		   : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5950
5951	      new_const = simplify_binary_operation (associate_code, mode,
5952						     const_arg1, inner_const);
5953
5954	      if (new_const == 0)
5955		break;
5956
5957	      /* If we are associating shift operations, don't let this
5958		 produce a shift of the size of the object or larger.
5959		 This could occur when we follow a sign-extend by a right
5960		 shift on a machine that does a sign-extend as a pair
5961		 of shifts.  */
5962
5963	      if (is_shift && GET_CODE (new_const) == CONST_INT
5964		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5965		{
5966		  /* As an exception, we can turn an ASHIFTRT of this
5967		     form into a shift of the number of bits - 1.  */
5968		  if (code == ASHIFTRT)
5969		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5970		  else
5971		    break;
5972		}
5973
5974	      y = copy_rtx (XEXP (y, 0));
5975
5976	      /* If Y contains our first operand (the most common way this
		 can happen is if Y is a MEM), we would go into an infinite
5978		 loop if we tried to fold it.  So don't in that case.  */
5979
5980	      if (! reg_mentioned_p (folded_arg0, y))
5981		y = fold_rtx (y, insn);
5982
5983	      return cse_gen_binary (code, mode, y, new_const);
5984	    }
5985	  break;
5986
5987	default:
5988	  break;
5989	}
5990
5991      new = simplify_binary_operation (code, mode,
5992				       const_arg0 ? const_arg0 : folded_arg0,
5993				       const_arg1 ? const_arg1 : folded_arg1);
5994      break;
5995
5996    case 'o':
5997      /* (lo_sum (high X) X) is simply X.  */
5998      if (code == LO_SUM && const_arg0 != 0
5999	  && GET_CODE (const_arg0) == HIGH
6000	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
6001	return const_arg1;
6002      break;
6003
6004    case '3':
6005    case 'b':
6006      new = simplify_ternary_operation (code, mode, mode_arg0,
6007					const_arg0 ? const_arg0 : folded_arg0,
6008					const_arg1 ? const_arg1 : folded_arg1,
6009					const_arg2 ? const_arg2 : XEXP (x, 2));
6010      break;
6011
6012    case 'x':
6013      /* Always eliminate CONSTANT_P_RTX at this stage. */
6014      if (code == CONSTANT_P_RTX)
6015	return (const_arg0 ? const1_rtx : const0_rtx);
6016      break;
6017    }
6018
6019  return new ? new : x;
6020}
6021
6022/* Return a constant value currently equivalent to X.
6023   Return 0 if we don't know one.  */
6024
6025static rtx
6026equiv_constant (x)
6027     rtx x;
6028{
6029  if (GET_CODE (x) == REG
6030      && REGNO_QTY_VALID_P (REGNO (x))
6031      && qty_const[REG_QTY (REGNO (x))])
6032    x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
6033
6034  if (x == 0 || CONSTANT_P (x))
6035    return x;
6036
6037  /* If X is a MEM, try to fold it outside the context of any insn to see if
6038     it might be equivalent to a constant.  That handles the case where it
6039     is a constant-pool reference.  Then try to look it up in the hash table
6040     in case it is something whose value we have seen before.  */
6041
6042  if (GET_CODE (x) == MEM)
6043    {
6044      struct table_elt *elt;
6045
6046      x = fold_rtx (x, NULL_RTX);
6047      if (CONSTANT_P (x))
6048	return x;
6049
6050      elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
6051      if (elt == 0)
6052	return 0;
6053
6054      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
6055	if (elt->is_const && CONSTANT_P (elt->exp))
6056	  return elt->exp;
6057    }
6058
6059  return 0;
6060}
6061
6062/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6063   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6064   least-significant part of X.
6065   MODE specifies how big a part of X to return.
6066
6067   If the requested operation cannot be done, 0 is returned.
6068
6069   This is similar to gen_lowpart in emit-rtl.c.  */
6070
6071rtx
6072gen_lowpart_if_possible (mode, x)
6073     enum machine_mode mode;
6074     register rtx x;
6075{
6076  rtx result = gen_lowpart_common (mode, x);
6077
6078  if (result)
6079    return result;
6080  else if (GET_CODE (x) == MEM)
6081    {
6082      /* This is the only other case we handle.  */
6083      register int offset = 0;
6084      rtx new;
6085
6086      if (WORDS_BIG_ENDIAN)
6087	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6088		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6089      if (BYTES_BIG_ENDIAN)
6090	/* Adjust the address so that the address-after-the-data is
6091	   unchanged.  */
6092	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6093		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6094      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
6095      if (! memory_address_p (mode, XEXP (new, 0)))
6096	return 0;
6097      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6098      MEM_COPY_ATTRIBUTES (new, x);
6099      return new;
6100    }
6101  else
6102    return 0;
6103}
6104
6105/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6106   branch.  It will be zero if not.
6107
6108   In certain cases, this can cause us to add an equivalence.  For example,
6109   if we are following the taken case of
6110   	if (i == 2)
   we can add the fact that `i' and `2' are now equivalent.
6112
6113   In any case, we can record that this comparison was passed.  If the same
6114   comparison is seen later, we will know its value.  */
6115
6116static void
6117record_jump_equiv (insn, taken)
6118     rtx insn;
6119     int taken;
6120{
6121  int cond_known_true;
6122  rtx op0, op1;
6123  enum machine_mode mode, mode0, mode1;
6124  int reversed_nonequality = 0;
6125  enum rtx_code code;
6126
6127  /* Ensure this is the right kind of insn.  */
6128  if (! condjump_p (insn) || simplejump_p (insn))
6129    return;
6130
6131  /* See if this jump condition is known true or false.  */
6132  if (taken)
6133    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6134  else
6135    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6136
6137  /* Get the type of comparison being done and the operands being compared.
6138     If we had to reverse a non-equality condition, record that fact so we
6139     know that it isn't valid for floating-point.  */
6140  code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6141  op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6142  op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6143
6144  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
6145  if (! cond_known_true)
6146    {
6147      reversed_nonequality = (code != EQ && code != NE);
6148      code = reverse_condition (code);
6149    }
6150
6151  /* The mode is the mode of the non-constant.  */
6152  mode = mode0;
6153  if (mode1 != VOIDmode)
6154    mode = mode1;
6155
6156  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6157}
6158
6159/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6160   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6161   Make any useful entries we can with that information.  Called from
6162   above function and called recursively.  */
6163
static void
record_jump_cond (code, mode, op0, op1, reversed_nonequality)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
     int reversed_nonequality;
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op0))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      /* Recurse with the inner register and the low part of OP1 (or a
	 SUBREG of OP1 if no low part can be formed).  */
      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  /* Same as above with the roles of OP0 and OP1 exchanged.  */
  if (code == EQ && GET_CODE (op1) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op1))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  /* Same as above with the roles of OP0 and OP1 exchanged.  */
  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Hash both operands.  HASH communicates through the file-scope
     variables do_not_record, hash_arg_in_memory and hash_arg_in_struct,
     so each must be cleared before the call and sampled after it.
     If do_not_record comes back set, the operand cannot usefully be
     entered in the table, so give up.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;
  op0_in_struct = hash_arg_in_struct;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;
  op1_in_struct = hash_arg_in_struct;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register or constant, we can't
	 do anything.  */

      if (GET_CODE (op1) != REG)
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || GET_CODE (op0) != REG || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL_PTR, 0))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1,mode);
	    }

	  op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	  op0_elt->in_struct = op0_in_struct;
	}

      /* Record the comparison in the qty_comparison_* tables, indexed
	 by OP0's quantity, so a later identical (or dominated)
	 comparison on the same quantity can be resolved.  */
      qty_comparison_code[REG_QTY (REGNO (op0))] = code;
      if (GET_CODE (op1) == REG)
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL_PTR, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	      op1_elt->in_struct = op1_in_struct;
	    }

	  /* OP1 is a register: record its quantity, and clear the
	     constant slot.  */
	  qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
	  qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
	}
      else
	{
	  /* OP1 is a constant: record it, and mark the quantity slot
	     unused with -1.  */
	  qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
	  qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
	}

      return;
    }

  /* This is a non-floating EQ: the operands are truly equivalent.
     If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL_PTR, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
      op0_elt->in_struct = op0_in_struct;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL_PTR, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
      op1_elt->in_struct = op1_in_struct;
    }

  merge_equiv_classes (op0_elt, op1_elt);
  last_jump_equiv_class = op0_elt;
}
6375
6376/* CSE processing for one instruction.
6377   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
6379   Then install the new sources and destinations in the table
6380   of available values.
6381
6382   If LIBCALL_INSN is nonzero, don't record any equivalence made in
6383   the insn.  It means that INSN is inside libcall block.  In this
6384   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
6385
6386/* Data on one SET contained in the instruction.  */
6387
struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Place where the pointer to the INNER_DEST was found.  */
  rtx *inner_dest_loc;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC is in a structure.  */
  char src_in_struct;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.  */
  enum machine_mode mode;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
};
6420
6421static void
6422cse_insn (insn, libcall_insn)
6423     rtx insn;
6424     rtx libcall_insn;
6425{
6426  register rtx x = PATTERN (insn);
6427  register int i;
6428  rtx tem;
6429  register int n_sets = 0;
6430
6431#ifdef HAVE_cc0
6432  /* Records what this insn does to set CC0.  */
6433  rtx this_insn_cc0 = 0;
6434  enum machine_mode this_insn_cc0_mode = VOIDmode;
6435#endif
6436
6437  rtx src_eqv = 0;
6438  struct table_elt *src_eqv_elt = 0;
6439  int src_eqv_volatile;
6440  int src_eqv_in_memory;
6441  int src_eqv_in_struct;
6442  unsigned src_eqv_hash;
6443
6444  struct set *sets;
6445
6446  this_insn = insn;
6447
6448  /* Find all the SETs and CLOBBERs in this instruction.
6449     Record all the SETs in the array `set' and count them.
6450     Also determine whether there is a CLOBBER that invalidates
6451     all memory references, or all references at varying addresses.  */
6452
6453  if (GET_CODE (insn) == CALL_INSN)
6454    {
6455      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6456	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6457          invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6458    }
6459
6460  if (GET_CODE (x) == SET)
6461    {
6462      sets = (struct set *) alloca (sizeof (struct set));
6463      sets[0].rtl = x;
6464
6465      /* Ignore SETs that are unconditional jumps.
6466	 They never need cse processing, so this does not hurt.
6467	 The reason is not efficiency but rather
6468	 so that we can test at the end for instructions
6469	 that have been simplified to unconditional jumps
6470	 and not be misled by unchanged instructions
6471	 that were unconditional jumps to begin with.  */
6472      if (SET_DEST (x) == pc_rtx
6473	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
6474	;
6475
6476      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6477	 The hard function value register is used only once, to copy to
6478	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6479	 Ensure we invalidate the destination register.  On the 80386 no
6480	 other code would invalidate it since it is a fixed_reg.
6481	 We need not check the return of apply_change_group; see canon_reg.  */
6482
6483      else if (GET_CODE (SET_SRC (x)) == CALL)
6484	{
6485	  canon_reg (SET_SRC (x), insn);
6486	  apply_change_group ();
6487	  fold_rtx (SET_SRC (x), insn);
6488	  invalidate (SET_DEST (x), VOIDmode);
6489	}
6490      else
6491	n_sets = 1;
6492    }
6493  else if (GET_CODE (x) == PARALLEL)
6494    {
6495      register int lim = XVECLEN (x, 0);
6496
6497      sets = (struct set *) alloca (lim * sizeof (struct set));
6498
6499      /* Find all regs explicitly clobbered in this insn,
6500	 and ensure they are not replaced with any other regs
6501	 elsewhere in this insn.
6502	 When a reg that is clobbered is also used for input,
6503	 we should presume that that is for a reason,
6504	 and we should not substitute some other register
6505	 which is not supposed to be clobbered.
6506	 Therefore, this loop cannot be merged into the one below
6507	 because a CALL may precede a CLOBBER and refer to the
6508	 value clobbered.  We must not let a canonicalization do
6509	 anything in that case.  */
6510      for (i = 0; i < lim; i++)
6511	{
6512	  register rtx y = XVECEXP (x, 0, i);
6513	  if (GET_CODE (y) == CLOBBER)
6514	    {
6515	      rtx clobbered = XEXP (y, 0);
6516
6517	      if (GET_CODE (clobbered) == REG
6518		  || GET_CODE (clobbered) == SUBREG)
6519		invalidate (clobbered, VOIDmode);
6520	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6521		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6522		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6523	    }
6524	}
6525
6526      for (i = 0; i < lim; i++)
6527	{
6528	  register rtx y = XVECEXP (x, 0, i);
6529	  if (GET_CODE (y) == SET)
6530	    {
6531	      /* As above, we ignore unconditional jumps and call-insns and
6532		 ignore the result of apply_change_group.  */
6533	      if (GET_CODE (SET_SRC (y)) == CALL)
6534		{
6535		  canon_reg (SET_SRC (y), insn);
6536		  apply_change_group ();
6537		  fold_rtx (SET_SRC (y), insn);
6538		  invalidate (SET_DEST (y), VOIDmode);
6539		}
6540	      else if (SET_DEST (y) == pc_rtx
6541		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
6542		;
6543	      else
6544		sets[n_sets++].rtl = y;
6545	    }
6546	  else if (GET_CODE (y) == CLOBBER)
6547	    {
6548	      /* If we clobber memory, canon the address.
6549		 This does nothing when a register is clobbered
6550		 because we have already invalidated the reg.  */
6551	      if (GET_CODE (XEXP (y, 0)) == MEM)
6552		canon_reg (XEXP (y, 0), NULL_RTX);
6553	    }
6554	  else if (GET_CODE (y) == USE
6555		   && ! (GET_CODE (XEXP (y, 0)) == REG
6556			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6557	    canon_reg (y, NULL_RTX);
6558	  else if (GET_CODE (y) == CALL)
6559	    {
6560	      /* The result of apply_change_group can be ignored; see
6561		 canon_reg.  */
6562	      canon_reg (y, insn);
6563	      apply_change_group ();
6564	      fold_rtx (y, insn);
6565	    }
6566	}
6567    }
6568  else if (GET_CODE (x) == CLOBBER)
6569    {
6570      if (GET_CODE (XEXP (x, 0)) == MEM)
6571	canon_reg (XEXP (x, 0), NULL_RTX);
6572    }
6573
6574  /* Canonicalize a USE of a pseudo register or memory location.  */
6575  else if (GET_CODE (x) == USE
6576	   && ! (GET_CODE (XEXP (x, 0)) == REG
6577		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6578    canon_reg (XEXP (x, 0), NULL_RTX);
6579  else if (GET_CODE (x) == CALL)
6580    {
6581      /* The result of apply_change_group can be ignored; see canon_reg.  */
6582      canon_reg (x, insn);
6583      apply_change_group ();
6584      fold_rtx (x, insn);
6585    }
6586
6587  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6588     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
6589     is handled specially for this case, and if it isn't set, then there will
6590     be no equivalence for the destination.  */
6591  if (n_sets == 1 && REG_NOTES (insn) != 0
6592      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6593      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6594	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6595    src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6596
6597  /* Canonicalize sources and addresses of destinations.
6598     We do this in a separate pass to avoid problems when a MATCH_DUP is
6599     present in the insn pattern.  In that case, we want to ensure that
6600     we don't break the duplicate nature of the pattern.  So we will replace
6601     both operands at the same time.  Otherwise, we would fail to find an
6602     equivalent substitution in the loop calling validate_change below.
6603
6604     We used to suppress canonicalization of DEST if it appears in SRC,
6605     but we don't do this any more.  */
6606
6607  for (i = 0; i < n_sets; i++)
6608    {
6609      rtx dest = SET_DEST (sets[i].rtl);
6610      rtx src = SET_SRC (sets[i].rtl);
6611      rtx new = canon_reg (src, insn);
6612      int insn_code;
6613
6614      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6615	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6616	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6617	  || (insn_code = recog_memoized (insn)) < 0
6618	  || insn_n_dups[insn_code] > 0)
6619	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6620      else
6621	SET_SRC (sets[i].rtl) = new;
6622
6623      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6624	{
6625	  validate_change (insn, &XEXP (dest, 1),
6626			   canon_reg (XEXP (dest, 1), insn), 1);
6627	  validate_change (insn, &XEXP (dest, 2),
6628			   canon_reg (XEXP (dest, 2), insn), 1);
6629	}
6630
6631      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6632	     || GET_CODE (dest) == ZERO_EXTRACT
6633	     || GET_CODE (dest) == SIGN_EXTRACT)
6634	dest = XEXP (dest, 0);
6635
6636      if (GET_CODE (dest) == MEM)
6637	canon_reg (dest, insn);
6638    }
6639
6640  /* Now that we have done all the replacements, we can apply the change
6641     group and see if they all work.  Note that this will cause some
6642     canonicalizations that would have worked individually not to be applied
6643     because some other canonicalization didn't work, but this should not
6644     occur often.
6645
6646     The result of apply_change_group can be ignored; see canon_reg.  */
6647
6648  apply_change_group ();
6649
6650  /* Set sets[i].src_elt to the class each source belongs to.
6651     Detect assignments from or to volatile things
6652     and set set[i] to zero so they will be ignored
6653     in the rest of this function.
6654
6655     Nothing in this loop changes the hash table or the register chains.  */
6656
6657  for (i = 0; i < n_sets; i++)
6658    {
6659      register rtx src, dest;
6660      register rtx src_folded;
6661      register struct table_elt *elt = 0, *p;
6662      enum machine_mode mode;
6663      rtx src_eqv_here;
6664      rtx src_const = 0;
6665      rtx src_related = 0;
6666      struct table_elt *src_const_elt = 0;
6667      int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6668      int src_related_cost = 10000, src_elt_cost = 10000;
6669      /* Set non-zero if we need to call force_const_mem on the
6670	 contents of src_folded before using it.  */
6671      int src_folded_force_flag = 0;
6672
6673      dest = SET_DEST (sets[i].rtl);
6674      src = SET_SRC (sets[i].rtl);
6675
6676      /* If SRC is a constant that has no machine mode,
6677	 hash it with the destination's machine mode.
6678	 This way we can keep different modes separate.  */
6679
6680      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6681      sets[i].mode = mode;
6682
6683      if (src_eqv)
6684	{
6685	  enum machine_mode eqvmode = mode;
6686	  if (GET_CODE (dest) == STRICT_LOW_PART)
6687	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6688	  do_not_record = 0;
6689	  hash_arg_in_memory = 0;
6690	  hash_arg_in_struct = 0;
6691	  src_eqv = fold_rtx (src_eqv, insn);
6692	  src_eqv_hash = HASH (src_eqv, eqvmode);
6693
6694	  /* Find the equivalence class for the equivalent expression.  */
6695
6696	  if (!do_not_record)
6697	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6698
6699	  src_eqv_volatile = do_not_record;
6700	  src_eqv_in_memory = hash_arg_in_memory;
6701	  src_eqv_in_struct = hash_arg_in_struct;
6702	}
6703
6704      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6705	 value of the INNER register, not the destination.  So it is not
6706	 a valid substitution for the source.  But save it for later.  */
6707      if (GET_CODE (dest) == STRICT_LOW_PART)
6708	src_eqv_here = 0;
6709      else
6710	src_eqv_here = src_eqv;
6711
6712      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
6713	 simplified result, which may not necessarily be valid.  */
6714      src_folded = fold_rtx (src, insn);
6715
6716#if 0
6717      /* ??? This caused bad code to be generated for the m68k port with -O2.
6718	 Suppose src is (CONST_INT -1), and that after truncation src_folded
6719	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
6720	 At the end we will add src and src_const to the same equivalence
6721	 class.  We now have 3 and -1 on the same equivalence class.  This
6722	 causes later instructions to be mis-optimized.  */
6723      /* If storing a constant in a bitfield, pre-truncate the constant
6724	 so we will be able to record it later.  */
6725      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6726	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6727	{
6728	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6729
6730	  if (GET_CODE (src) == CONST_INT
6731	      && GET_CODE (width) == CONST_INT
6732	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6733	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6734	    src_folded
6735	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6736					  << INTVAL (width)) - 1));
6737	}
6738#endif
6739
6740      /* Compute SRC's hash code, and also notice if it
6741	 should not be recorded at all.  In that case,
6742	 prevent any further processing of this assignment.  */
6743      do_not_record = 0;
6744      hash_arg_in_memory = 0;
6745      hash_arg_in_struct = 0;
6746
6747      sets[i].src = src;
6748      sets[i].src_hash = HASH (src, mode);
6749      sets[i].src_volatile = do_not_record;
6750      sets[i].src_in_memory = hash_arg_in_memory;
6751      sets[i].src_in_struct = hash_arg_in_struct;
6752
6753      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6754	 a pseudo that is set more than once, do not record SRC.  Using
6755	 SRC as a replacement for anything else will be incorrect in that
6756	 situation.  Note that this usually occurs only for stack slots,
6757	 in which case all the RTL would be referring to SRC, so we don't
6758	 lose any optimization opportunities by not having SRC in the
6759	 hash table.  */
6760
6761      if (GET_CODE (src) == MEM
6762	  && find_reg_note (insn, REG_EQUIV, src) != 0
6763	  && GET_CODE (dest) == REG
6764	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6765	  && REG_N_SETS (REGNO (dest)) != 1)
6766	sets[i].src_volatile = 1;
6767
6768#if 0
6769      /* It is no longer clear why we used to do this, but it doesn't
6770	 appear to still be needed.  So let's try without it since this
6771	 code hurts cse'ing widened ops.  */
6772      /* If source is a perverse subreg (such as QI treated as an SI),
6773	 treat it as volatile.  It may do the work of an SI in one context
6774	 where the extra bits are not being used, but cannot replace an SI
6775	 in general.  */
6776      if (GET_CODE (src) == SUBREG
6777	  && (GET_MODE_SIZE (GET_MODE (src))
6778	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6779	sets[i].src_volatile = 1;
6780#endif
6781
6782      /* Locate all possible equivalent forms for SRC.  Try to replace
6783         SRC in the insn with each cheaper equivalent.
6784
6785         We have the following types of equivalents: SRC itself, a folded
6786         version, a value given in a REG_EQUAL note, or a value related
6787	 to a constant.
6788
6789         Each of these equivalents may be part of an additional class
6790         of equivalents (if more than one is in the table, they must be in
6791         the same class; we check for this).
6792
6793	 If the source is volatile, we don't do any table lookups.
6794
6795         We note any constant equivalent for possible later use in a
6796         REG_NOTE.  */
6797
6798      if (!sets[i].src_volatile)
6799	elt = lookup (src, sets[i].src_hash, mode);
6800
6801      sets[i].src_elt = elt;
6802
6803      if (elt && src_eqv_here && src_eqv_elt)
6804        {
6805          if (elt->first_same_value != src_eqv_elt->first_same_value)
6806	    {
6807	      /* The REG_EQUAL is indicating that two formerly distinct
6808		 classes are now equivalent.  So merge them.  */
6809	      merge_equiv_classes (elt, src_eqv_elt);
6810	      src_eqv_hash = HASH (src_eqv, elt->mode);
6811	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6812	    }
6813
6814          src_eqv_here = 0;
6815        }
6816
6817      else if (src_eqv_elt)
6818        elt = src_eqv_elt;
6819
6820      /* Try to find a constant somewhere and record it in `src_const'.
6821	 Record its table element, if any, in `src_const_elt'.  Look in
6822	 any known equivalences first.  (If the constant is not in the
6823	 table, also set `sets[i].src_const_hash').  */
6824      if (elt)
6825        for (p = elt->first_same_value; p; p = p->next_same_value)
6826	  if (p->is_const)
6827	    {
6828	      src_const = p->exp;
6829	      src_const_elt = elt;
6830	      break;
6831	    }
6832
6833      if (src_const == 0
6834	  && (CONSTANT_P (src_folded)
6835	      /* Consider (minus (label_ref L1) (label_ref L2)) as
6836		 "constant" here so we will record it. This allows us
6837		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
6838	      || (GET_CODE (src_folded) == MINUS
6839		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6840		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6841	src_const = src_folded, src_const_elt = elt;
6842      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6843	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6844
6845      /* If we don't know if the constant is in the table, get its
6846	 hash code and look it up.  */
6847      if (src_const && src_const_elt == 0)
6848	{
6849	  sets[i].src_const_hash = HASH (src_const, mode);
6850	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6851	}
6852
6853      sets[i].src_const = src_const;
6854      sets[i].src_const_elt = src_const_elt;
6855
6856      /* If the constant and our source are both in the table, mark them as
6857	 equivalent.  Otherwise, if a constant is in the table but the source
6858	 isn't, set ELT to it.  */
6859      if (src_const_elt && elt
6860	  && src_const_elt->first_same_value != elt->first_same_value)
6861	merge_equiv_classes (elt, src_const_elt);
6862      else if (src_const_elt && elt == 0)
6863	elt = src_const_elt;
6864
6865      /* See if there is a register linearly related to a constant
6866         equivalent of SRC.  */
6867      if (src_const
6868	  && (GET_CODE (src_const) == CONST
6869	      || (src_const_elt && src_const_elt->related_value != 0)))
6870        {
6871          src_related = use_related_value (src_const, src_const_elt);
6872          if (src_related)
6873            {
6874	      struct table_elt *src_related_elt
6875		    = lookup (src_related, HASH (src_related, mode), mode);
6876	      if (src_related_elt && elt)
6877	        {
6878		  if (elt->first_same_value
6879		      != src_related_elt->first_same_value)
6880		    /* This can occur when we previously saw a CONST
6881		       involving a SYMBOL_REF and then see the SYMBOL_REF
6882		       twice.  Merge the involved classes.  */
6883		    merge_equiv_classes (elt, src_related_elt);
6884
6885	          src_related = 0;
6886		  src_related_elt = 0;
6887	        }
6888              else if (src_related_elt && elt == 0)
6889	        elt = src_related_elt;
6890	    }
6891        }
6892
6893      /* See if we have a CONST_INT that is already in a register in a
6894	 wider mode.  */
6895
6896      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6897	  && GET_MODE_CLASS (mode) == MODE_INT
6898	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6899	{
6900	  enum machine_mode wider_mode;
6901
6902	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
6903	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6904	       && src_related == 0;
6905	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6906	    {
6907	      struct table_elt *const_elt
6908		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6909
6910	      if (const_elt == 0)
6911		continue;
6912
6913	      for (const_elt = const_elt->first_same_value;
6914		   const_elt; const_elt = const_elt->next_same_value)
6915		if (GET_CODE (const_elt->exp) == REG)
6916		  {
6917		    src_related = gen_lowpart_if_possible (mode,
6918							   const_elt->exp);
6919		    break;
6920		  }
6921	    }
6922	}
6923
6924      /* Another possibility is that we have an AND with a constant in
6925	 a mode narrower than a word.  If so, it might have been generated
6926	 as part of an "if" which would narrow the AND.  If we already
6927	 have done the AND in a wider mode, we can use a SUBREG of that
6928	 value.  */
6929
6930      if (flag_expensive_optimizations && ! src_related
6931	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6932	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6933	{
6934	  enum machine_mode tmode;
6935	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6936
6937	  for (tmode = GET_MODE_WIDER_MODE (mode);
6938	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6939	       tmode = GET_MODE_WIDER_MODE (tmode))
6940	    {
6941	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6942	      struct table_elt *larger_elt;
6943
6944	      if (inner)
6945		{
6946		  PUT_MODE (new_and, tmode);
6947		  XEXP (new_and, 0) = inner;
6948		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6949		  if (larger_elt == 0)
6950		    continue;
6951
6952		  for (larger_elt = larger_elt->first_same_value;
6953		       larger_elt; larger_elt = larger_elt->next_same_value)
6954		    if (GET_CODE (larger_elt->exp) == REG)
6955		      {
6956			src_related
6957			  = gen_lowpart_if_possible (mode, larger_elt->exp);
6958			break;
6959		      }
6960
6961		  if (src_related)
6962		    break;
6963		}
6964	    }
6965	}
6966
6967#ifdef LOAD_EXTEND_OP
6968      /* See if a MEM has already been loaded with a widening operation;
6969	 if it has, we can use a subreg of that.  Many CISC machines
6970	 also have such operations, but this is only likely to be
6971	 beneficial on these machines.  */
6972
6973      if (flag_expensive_optimizations &&  src_related == 0
6974	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6975	  && GET_MODE_CLASS (mode) == MODE_INT
6976	  && GET_CODE (src) == MEM && ! do_not_record
6977	  && LOAD_EXTEND_OP (mode) != NIL)
6978	{
6979	  enum machine_mode tmode;
6980
6981	  /* Set what we are trying to extend and the operation it might
6982	     have been extended with.  */
6983	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6984	  XEXP (memory_extend_rtx, 0) = src;
6985
6986	  for (tmode = GET_MODE_WIDER_MODE (mode);
6987	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6988	       tmode = GET_MODE_WIDER_MODE (tmode))
6989	    {
6990	      struct table_elt *larger_elt;
6991
6992	      PUT_MODE (memory_extend_rtx, tmode);
6993	      larger_elt = lookup (memory_extend_rtx,
6994				   HASH (memory_extend_rtx, tmode), tmode);
6995	      if (larger_elt == 0)
6996		continue;
6997
6998	      for (larger_elt = larger_elt->first_same_value;
6999		   larger_elt; larger_elt = larger_elt->next_same_value)
7000		if (GET_CODE (larger_elt->exp) == REG)
7001		  {
7002		    src_related = gen_lowpart_if_possible (mode,
7003							   larger_elt->exp);
7004		    break;
7005		  }
7006
7007	      if (src_related)
7008		break;
7009	    }
7010	}
7011#endif /* LOAD_EXTEND_OP */
7012
7013      if (src == src_folded)
7014        src_folded = 0;
7015
7016      /* At this point, ELT, if non-zero, points to a class of expressions
7017         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
7018	 and SRC_RELATED, if non-zero, each contain additional equivalent
7019	 expressions.  Prune these latter expressions by deleting expressions
7020	 already in the equivalence class.
7021
7022	 Check for an equivalent identical to the destination.  If found,
7023	 this is the preferred equivalent since it will likely lead to
7024	 elimination of the insn.  Indicate this by placing it in
7025	 `src_related'.  */
7026
7027      if (elt) elt = elt->first_same_value;
7028      for (p = elt; p; p = p->next_same_value)
7029        {
7030	  enum rtx_code code = GET_CODE (p->exp);
7031
7032	  /* If the expression is not valid, ignore it.  Then we do not
7033	     have to check for validity below.  In most cases, we can use
7034	     `rtx_equal_p', since canonicalization has already been done.  */
7035	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
7036	    continue;
7037
7038	  /* Also skip paradoxical subregs, unless that's what we're
7039	     looking for.  */
7040	  if (code == SUBREG
7041	      && (GET_MODE_SIZE (GET_MODE (p->exp))
7042		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
7043	      && ! (src != 0
7044		    && GET_CODE (src) == SUBREG
7045		    && GET_MODE (src) == GET_MODE (p->exp)
7046		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7047			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
7048	    continue;
7049
7050          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7051	    src = 0;
7052          else if (src_folded && GET_CODE (src_folded) == code
7053		   && rtx_equal_p (src_folded, p->exp))
7054	    src_folded = 0;
7055          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7056		   && rtx_equal_p (src_eqv_here, p->exp))
7057	    src_eqv_here = 0;
7058          else if (src_related && GET_CODE (src_related) == code
7059		   && rtx_equal_p (src_related, p->exp))
7060	    src_related = 0;
7061
7062	  /* If this is the same as the destination of the insn, we want
7063	     to prefer it.  Copy it to src_related.  The code below will
7064	     then give it a negative cost.  */
7065	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7066	    src_related = dest;
7067
7068        }
7069
7070      /* Find the cheapest valid equivalent, trying all the available
7071         possibilities.  Prefer items not in the hash table to ones
7072         that are when they are equal cost.  Note that we can never
7073         worsen an insn as the current contents will also succeed.
7074	 If we find an equivalent identical to the destination, use it as best,
7075	 since this insn will probably be eliminated in that case.  */
7076      if (src)
7077	{
7078	  if (rtx_equal_p (src, dest))
7079	    src_cost = -1;
7080	  else
7081	    src_cost = COST (src);
7082	}
7083
7084      if (src_eqv_here)
7085	{
7086	  if (rtx_equal_p (src_eqv_here, dest))
7087	    src_eqv_cost = -1;
7088	  else
7089	    src_eqv_cost = COST (src_eqv_here);
7090	}
7091
7092      if (src_folded)
7093	{
7094	  if (rtx_equal_p (src_folded, dest))
7095	    src_folded_cost = -1;
7096	  else
7097	    src_folded_cost = COST (src_folded);
7098	}
7099
7100      if (src_related)
7101	{
7102	  if (rtx_equal_p (src_related, dest))
7103	    src_related_cost = -1;
7104	  else
7105	    src_related_cost = COST (src_related);
7106	}
7107
7108      /* If this was an indirect jump insn, a known label will really be
7109	 cheaper even though it looks more expensive.  */
7110      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7111	src_folded = src_const, src_folded_cost = -1;
7112
7113      /* Terminate loop when replacement made.  This must terminate since
7114         the current contents will be tested and will always be valid.  */
7115      while (1)
7116        {
7117          rtx trial, old_src;
7118
7119          /* Skip invalid entries.  */
7120          while (elt && GET_CODE (elt->exp) != REG
7121	         && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7122	    elt = elt->next_same_value;
7123
7124	  /* A paradoxical subreg would be bad here: it'll be the right
7125	     size, but later may be adjusted so that the upper bits aren't
7126	     what we want.  So reject it.  */
7127	  if (elt != 0
7128	      && GET_CODE (elt->exp) == SUBREG
7129	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
7130		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7131	      /* It is okay, though, if the rtx we're trying to match
7132		 will ignore any of the bits we can't predict.  */
7133	      && ! (src != 0
7134		    && GET_CODE (src) == SUBREG
7135		    && GET_MODE (src) == GET_MODE (elt->exp)
7136		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7137			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7138	    {
7139	      elt = elt->next_same_value;
7140	      continue;
7141	    }
7142
7143          if (elt) src_elt_cost = elt->cost;
7144
7145          /* Find cheapest and skip it for the next time.   For items
7146	     of equal cost, use this order:
7147	     src_folded, src, src_eqv, src_related and hash table entry.  */
7148          if (src_folded_cost <= src_cost
7149	      && src_folded_cost <= src_eqv_cost
7150	      && src_folded_cost <= src_related_cost
7151	      && src_folded_cost <= src_elt_cost)
7152	    {
7153	      trial = src_folded, src_folded_cost = 10000;
7154	      if (src_folded_force_flag)
7155		trial = force_const_mem (mode, trial);
7156	    }
7157          else if (src_cost <= src_eqv_cost
7158	           && src_cost <= src_related_cost
7159	           && src_cost <= src_elt_cost)
7160	    trial = src, src_cost = 10000;
7161          else if (src_eqv_cost <= src_related_cost
7162	           && src_eqv_cost <= src_elt_cost)
7163	    trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7164          else if (src_related_cost <= src_elt_cost)
7165	    trial = copy_rtx (src_related), src_related_cost = 10000;
7166          else
7167	    {
7168	      trial = copy_rtx (elt->exp);
7169	      elt = elt->next_same_value;
7170	      src_elt_cost = 10000;
7171	    }
7172
7173	  /* We don't normally have an insn matching (set (pc) (pc)), so
7174	     check for this separately here.  We will delete such an
7175	     insn below.
7176
7177	     Tablejump insns contain a USE of the table, so simply replacing
7178	     the operand with the constant won't match.  This is simply an
7179	     unconditional branch, however, and is therefore valid.  Just
7180	     insert the substitution here and we will delete and re-emit
7181	     the insn later.  */
7182
7183	  /* Keep track of the original SET_SRC so that we can fix notes
7184	     on libcall instructions.  */
7185 	  old_src = SET_SRC (sets[i].rtl);
7186
7187	  if (n_sets == 1 && dest == pc_rtx
7188	      && (trial == pc_rtx
7189		  || (GET_CODE (trial) == LABEL_REF
7190		      && ! condjump_p (insn))))
7191	    {
7192	      /* If TRIAL is a label in front of a jump table, we are
7193		 really falling through the switch (this is how casesi
7194		 insns work), so we must branch around the table.  */
7195	      if (GET_CODE (trial) == CODE_LABEL
7196		  && NEXT_INSN (trial) != 0
7197		  && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7198		  && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7199		      || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7200
7201		trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7202
7203	      SET_SRC (sets[i].rtl) = trial;
7204 	      cse_jumps_altered = 1;
7205	      break;
7206	    }
7207
7208	  /* Look for a substitution that makes a valid insn.  */
7209          else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
7210	    {
7211	      /* If we just made a substitution inside a libcall, then we
7212		 need to make the same substitution in any notes attached
7213		 to the RETVAL insn.  */
7214	      if (libcall_insn
7215		  && (GET_CODE (old_src) == REG
7216		      || GET_CODE (old_src) == SUBREG
7217		      ||  GET_CODE (old_src) == MEM))
7218		replace_rtx (REG_NOTES (libcall_insn), old_src,
7219			     canon_reg (SET_SRC (sets[i].rtl), insn));
7220
7221	      /* The result of apply_change_group can be ignored; see
7222		 canon_reg.  */
7223
7224	      validate_change (insn, &SET_SRC (sets[i].rtl),
7225			       canon_reg (SET_SRC (sets[i].rtl), insn),
7226			       1);
7227	      apply_change_group ();
7228	      break;
7229	    }
7230
7231	  /* If we previously found constant pool entries for
7232	     constants and this is a constant, try making a
7233	     pool entry.  Put it in src_folded unless we already have done
7234	     this since that is where it likely came from.  */
7235
7236	  else if (constant_pool_entries_cost
7237		   && CONSTANT_P (trial)
7238		   && ! (GET_CODE (trial) == CONST
7239			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7240		   && (src_folded == 0
7241		       || (GET_CODE (src_folded) != MEM
7242			   && ! src_folded_force_flag))
7243		   && GET_MODE_CLASS (mode) != MODE_CC
7244		   && mode != VOIDmode)
7245	    {
7246	      src_folded_force_flag = 1;
7247	      src_folded = trial;
7248	      src_folded_cost = constant_pool_entries_cost;
7249	    }
7250        }
7251
7252      src = SET_SRC (sets[i].rtl);
7253
7254      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7255	 However, there is an important exception:  If both are registers
7256	 that are not the head of their equivalence class, replace SET_SRC
7257	 with the head of the class.  If we do not do this, we will have
7258	 both registers live over a portion of the basic block.  This way,
7259	 their lifetimes will likely abut instead of overlapping.  */
7260      if (GET_CODE (dest) == REG
7261	  && REGNO_QTY_VALID_P (REGNO (dest))
7262	  && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7263	  && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7264	  && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7265	  /* Don't do this if the original insn had a hard reg as
7266	     SET_SRC.  */
7267	  && (GET_CODE (sets[i].src) != REG
7268	      || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7269	/* We can't call canon_reg here because it won't do anything if
7270	   SRC is a hard register.  */
7271	{
7272	  int first = qty_first_reg[REG_QTY (REGNO (src))];
7273	  rtx new_src
7274	    = (first >= FIRST_PSEUDO_REGISTER
7275	       ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7276
7277	  /* We must use validate-change even for this, because this
7278	     might be a special no-op instruction, suitable only to
7279	     tag notes onto.  */
7280	  if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7281	    {
7282	      src = new_src;
7283	      /* If we had a constant that is cheaper than what we are now
7284		 setting SRC to, use that constant.  We ignored it when we
7285		 thought we could make this into a no-op.  */
7286	      if (src_const && COST (src_const) < COST (src)
7287		  && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7288				      0))
7289		src = src_const;
7290	    }
7291	}
7292
7293      /* If we made a change, recompute SRC values.  */
7294      if (src != sets[i].src)
7295        {
7296          do_not_record = 0;
7297          hash_arg_in_memory = 0;
7298          hash_arg_in_struct = 0;
7299	  sets[i].src = src;
7300          sets[i].src_hash = HASH (src, mode);
7301          sets[i].src_volatile = do_not_record;
7302          sets[i].src_in_memory = hash_arg_in_memory;
7303          sets[i].src_in_struct = hash_arg_in_struct;
7304          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7305        }
7306
7307      /* If this is a single SET, we are setting a register, and we have an
7308	 equivalent constant, we want to add a REG_NOTE.   We don't want
7309	 to write a REG_EQUAL note for a constant pseudo since verifying that
7310	 that pseudo hasn't been eliminated is a pain.  Such a note also
7311	 won't help anything.
7312
7313	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7314	 which can be created for a reference to a compile time computable
7315	 entry in a jump table.  */
7316
7317      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7318	  && GET_CODE (src_const) != REG
7319	  && ! (GET_CODE (src_const) == CONST
7320		&& GET_CODE (XEXP (src_const, 0)) == MINUS
7321		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7322		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7323	{
7324	  tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7325
7326	  /* Make sure that the rtx is not shared with any other insn.  */
7327	  src_const = copy_rtx (src_const);
7328
7329	  /* Record the actual constant value in a REG_EQUAL note, making
7330	     a new one if one does not already exist.  */
7331	  if (tem)
7332	    XEXP (tem, 0) = src_const;
7333	  else
7334	    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7335						  src_const, REG_NOTES (insn));
7336
7337          /* If storing a constant value in a register that
7338	     previously held the constant value 0,
7339	     record this fact with a REG_WAS_0 note on this insn.
7340
7341	     Note that the *register* is required to have previously held 0,
7342	     not just any register in the quantity and we must point to the
7343	     insn that set that register to zero.
7344
7345	     Rather than track each register individually, we just see if
7346	     the last set for this quantity was for this register.  */
7347
7348	  if (REGNO_QTY_VALID_P (REGNO (dest))
7349	      && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7350	    {
7351	      /* See if we previously had a REG_WAS_0 note.  */
7352	      rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7353	      rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7354
7355	      if ((tem = single_set (const_insn)) != 0
7356		  && rtx_equal_p (SET_DEST (tem), dest))
7357		{
7358		  if (note)
7359		    XEXP (note, 0) = const_insn;
7360		  else
7361		    REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7362							  const_insn,
7363							  REG_NOTES (insn));
7364		}
7365	    }
7366	}
7367
7368      /* Now deal with the destination.  */
7369      do_not_record = 0;
7370      sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
7371
7372      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7373	 to the MEM or REG within it.  */
7374      while (GET_CODE (dest) == SIGN_EXTRACT
7375	     || GET_CODE (dest) == ZERO_EXTRACT
7376	     || GET_CODE (dest) == SUBREG
7377	     || GET_CODE (dest) == STRICT_LOW_PART)
7378	{
7379	  sets[i].inner_dest_loc = &XEXP (dest, 0);
7380	  dest = XEXP (dest, 0);
7381	}
7382
7383      sets[i].inner_dest = dest;
7384
7385      if (GET_CODE (dest) == MEM)
7386	{
7387#ifdef PUSH_ROUNDING
7388	  /* Stack pushes invalidate the stack pointer.  */
7389	  rtx addr = XEXP (dest, 0);
7390	  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7391	       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7392	      && XEXP (addr, 0) == stack_pointer_rtx)
7393	    invalidate (stack_pointer_rtx, Pmode);
7394#endif
7395	  dest = fold_rtx (dest, insn);
7396	}
7397
7398      /* Compute the hash code of the destination now,
7399	 before the effects of this instruction are recorded,
7400	 since the register values used in the address computation
7401	 are those before this instruction.  */
7402      sets[i].dest_hash = HASH (dest, mode);
7403
7404      /* Don't enter a bit-field in the hash table
7405	 because the value in it after the store
7406	 may not equal what was stored, due to truncation.  */
7407
7408      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7409	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7410	{
7411	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7412
7413	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7414	      && GET_CODE (width) == CONST_INT
7415	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7416	      && ! (INTVAL (src_const)
7417		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7418	    /* Exception: if the value is constant,
7419	       and it won't be truncated, record it.  */
7420	    ;
7421	  else
7422	    {
7423	      /* This is chosen so that the destination will be invalidated
7424		 but no new value will be recorded.
7425		 We must invalidate because sometimes constant
7426		 values can be recorded for bitfields.  */
7427	      sets[i].src_elt = 0;
7428	      sets[i].src_volatile = 1;
7429	      src_eqv = 0;
7430	      src_eqv_elt = 0;
7431	    }
7432	}
7433
7434      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7435	 the insn.  */
7436      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7437	{
7438	  PUT_CODE (insn, NOTE);
7439	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7440	  NOTE_SOURCE_FILE (insn) = 0;
7441	  cse_jumps_altered = 1;
7442	  /* One less use of the label this insn used to jump to.  */
7443	  if (JUMP_LABEL (insn) != 0)
7444	    --LABEL_NUSES (JUMP_LABEL (insn));
7445	  /* No more processing for this set.  */
7446	  sets[i].rtl = 0;
7447	}
7448
7449      /* If this SET is now setting PC to a label, we know it used to
7450	 be a conditional or computed branch.  So we see if we can follow
7451	 it.  If it was a computed branch, delete it and re-emit.  */
7452      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7453	{
7454	  rtx p;
7455
7456	  /* If this is not in the format for a simple branch and
7457	     we are the only SET in it, re-emit it.  */
7458	  if (! simplejump_p (insn) && n_sets == 1)
7459	    {
7460	      rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7461	      JUMP_LABEL (new) = XEXP (src, 0);
7462	      LABEL_NUSES (XEXP (src, 0))++;
7463	      delete_insn (insn);
7464	      insn = new;
7465	    }
7466	  else
7467	    /* Otherwise, force rerecognition, since it probably had
7468	       a different pattern before.
7469	       This shouldn't really be necessary, since whatever
7470	       changed the source value above should have done this.
7471	       Until the right place is found, might as well do this here.  */
7472	    INSN_CODE (insn) = -1;
7473
7474	  /* Now that we've converted this jump to an unconditional jump,
7475	     there is dead code after it.  Delete the dead code until we
7476	     reach a BARRIER, the end of the function, or a label.  Do
7477	     not delete NOTEs except for NOTE_INSN_DELETED since later
7478	     phases assume these notes are retained.  */
7479
7480	  p = insn;
7481
7482	  while (NEXT_INSN (p) != 0
7483		 && GET_CODE (NEXT_INSN (p)) != BARRIER
7484		 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7485	    {
7486	      /* Note, we must update P with the return value from
7487		 delete_insn, otherwise we could get an infinite loop
7488		 if NEXT_INSN (p) had INSN_DELETED_P set.  */
7489	      if (GET_CODE (NEXT_INSN (p)) != NOTE
7490		  || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7491		p = PREV_INSN (delete_insn (NEXT_INSN (p)));
7492	      else
7493		p = NEXT_INSN (p);
7494	    }
7495
7496	  /* If we don't have a BARRIER immediately after INSN, put one there.
7497	     Much code assumes that there are no NOTEs between a JUMP_INSN and
7498	     BARRIER.  */
7499
7500	  if (NEXT_INSN (insn) == 0
7501	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7502	    emit_barrier_before (NEXT_INSN (insn));
7503
7504	  /* We might have two BARRIERs separated by notes.  Delete the second
7505	     one if so.  */
7506
7507	  if (p != insn && NEXT_INSN (p) != 0
7508	      && GET_CODE (NEXT_INSN (p)) == BARRIER)
7509	    delete_insn (NEXT_INSN (p));
7510
7511	  cse_jumps_altered = 1;
7512	  sets[i].rtl = 0;
7513	}
7514
7515      /* If destination is volatile, invalidate it and then do no further
7516	 processing for this assignment.  */
7517
7518      else if (do_not_record)
7519	{
7520	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7521	      || GET_CODE (dest) == MEM)
7522	    invalidate (dest, VOIDmode);
7523	  else if (GET_CODE (dest) == STRICT_LOW_PART
7524		   || GET_CODE (dest) == ZERO_EXTRACT)
7525	    invalidate (XEXP (dest, 0), GET_MODE (dest));
7526	  sets[i].rtl = 0;
7527	}
7528
7529      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7530	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7531
7532#ifdef HAVE_cc0
7533      /* If setting CC0, record what it was set to, or a constant, if it
7534	 is equivalent to a constant.  If it is being set to a floating-point
7535	 value, make a COMPARE with the appropriate constant of 0.  If we
7536	 don't do this, later code can interpret this as a test against
7537	 const0_rtx, which can cause problems if we try to put it into an
7538	 insn as a floating-point operand.  */
7539      if (dest == cc0_rtx)
7540	{
7541	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7542	  this_insn_cc0_mode = mode;
7543	  if (FLOAT_MODE_P (mode))
7544	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7545					     CONST0_RTX (mode));
7546	}
7547#endif
7548    }
7549
7550  /* Now enter all non-volatile source expressions in the hash table
7551     if they are not already present.
7552     Record their equivalence classes in src_elt.
7553     This way we can insert the corresponding destinations into
7554     the same classes even if the actual sources are no longer in them
7555     (having been invalidated).  */
7556
7557  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7558      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7559    {
7560      register struct table_elt *elt;
7561      register struct table_elt *classp = sets[0].src_elt;
7562      rtx dest = SET_DEST (sets[0].rtl);
7563      enum machine_mode eqvmode = GET_MODE (dest);
7564
7565      if (GET_CODE (dest) == STRICT_LOW_PART)
7566	{
7567	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7568	  classp = 0;
7569	}
7570      if (insert_regs (src_eqv, classp, 0))
7571	{
7572	  rehash_using_reg (src_eqv);
7573	  src_eqv_hash = HASH (src_eqv, eqvmode);
7574	}
7575      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7576      elt->in_memory = src_eqv_in_memory;
7577      elt->in_struct = src_eqv_in_struct;
7578      src_eqv_elt = elt;
7579
7580      /* Check to see if src_eqv_elt is the same as a set source which
7581	 does not yet have an elt, and if so set the elt of the set source
7582	 to src_eqv_elt.  */
7583      for (i = 0; i < n_sets; i++)
7584	if (sets[i].rtl && sets[i].src_elt == 0
7585	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7586	  sets[i].src_elt = src_eqv_elt;
7587    }
7588
7589  for (i = 0; i < n_sets; i++)
7590    if (sets[i].rtl && ! sets[i].src_volatile
7591	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7592      {
7593	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7594	  {
7595	    /* REG_EQUAL in setting a STRICT_LOW_PART
7596	       gives an equivalent for the entire destination register,
7597	       not just for the subreg being stored in now.
7598	       This is a more interesting equivalence, so we arrange later
7599	       to treat the entire reg as the destination.  */
7600	    sets[i].src_elt = src_eqv_elt;
7601	    sets[i].src_hash = src_eqv_hash;
7602	  }
7603	else
7604	  {
7605	    /* Insert source and constant equivalent into hash table, if not
7606	       already present.  */
7607	    register struct table_elt *classp = src_eqv_elt;
7608	    register rtx src = sets[i].src;
7609	    register rtx dest = SET_DEST (sets[i].rtl);
7610	    enum machine_mode mode
7611	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7612
7613	    /* Don't put a hard register source into the table if this is
7614	       the last insn of a libcall.  */
7615	    if (sets[i].src_elt == 0
7616		&& (GET_CODE (src) != REG
7617		    || REGNO (src) >= FIRST_PSEUDO_REGISTER
7618		    || ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
7619	      {
7620		register struct table_elt *elt;
7621
7622		/* Note that these insert_regs calls cannot remove
7623		   any of the src_elt's, because they would have failed to
7624		   match if not still valid.  */
7625		if (insert_regs (src, classp, 0))
7626		  {
7627		    rehash_using_reg (src);
7628		    sets[i].src_hash = HASH (src, mode);
7629		  }
7630		elt = insert (src, classp, sets[i].src_hash, mode);
7631		elt->in_memory = sets[i].src_in_memory;
7632		elt->in_struct = sets[i].src_in_struct;
7633		sets[i].src_elt = classp = elt;
7634	      }
7635
7636	    if (sets[i].src_const && sets[i].src_const_elt == 0
7637		&& src != sets[i].src_const
7638		&& ! rtx_equal_p (sets[i].src_const, src))
7639	      sets[i].src_elt = insert (sets[i].src_const, classp,
7640					sets[i].src_const_hash, mode);
7641	  }
7642      }
7643    else if (sets[i].src_elt == 0)
7644      /* If we did not insert the source into the hash table (e.g., it was
7645	 volatile), note the equivalence class for the REG_EQUAL value, if any,
7646	 so that the destination goes into that class.  */
7647      sets[i].src_elt = src_eqv_elt;
7648
7649  invalidate_from_clobbers (x);
7650
7651  /* Some registers are invalidated by subroutine calls.  Memory is
7652     invalidated by non-constant calls.  */
7653
7654  if (GET_CODE (insn) == CALL_INSN)
7655    {
7656      if (! CONST_CALL_P (insn))
7657	invalidate_memory ();
7658      invalidate_for_call ();
7659    }
7660
7661  /* Now invalidate everything set by this instruction.
7662     If a SUBREG or other funny destination is being set,
7663     sets[i].rtl is still nonzero, so here we invalidate the reg
7664     a part of which is being set.  */
7665
7666  for (i = 0; i < n_sets; i++)
7667    if (sets[i].rtl)
7668      {
7669	/* We can't use the inner dest, because the mode associated with
7670	   a ZERO_EXTRACT is significant.  */
7671	register rtx dest = SET_DEST (sets[i].rtl);
7672
7673	/* Needed for registers to remove the register from its
7674	   previous quantity's chain.
7675	   Needed for memory if this is a nonvarying address, unless
7676	   we have just done an invalidate_memory that covers even those.  */
7677	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7678	    || GET_CODE (dest) == MEM)
7679	  invalidate (dest, VOIDmode);
7680	else if (GET_CODE (dest) == STRICT_LOW_PART
7681		 || GET_CODE (dest) == ZERO_EXTRACT)
7682	  invalidate (XEXP (dest, 0), GET_MODE (dest));
7683      }
7684
7685  /* A volatile ASM invalidates everything.  */
7686  if (GET_CODE (insn) == INSN
7687      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7688      && MEM_VOLATILE_P (PATTERN (insn)))
7689    flush_hash_table ();
7690
7691  /* Make sure registers mentioned in destinations
7692     are safe for use in an expression to be inserted.
7693     This removes from the hash table
7694     any invalid entry that refers to one of these registers.
7695
7696     We don't care about the return value from mention_regs because
7697     we are going to hash the SET_DEST values unconditionally.  */
7698
7699  for (i = 0; i < n_sets; i++)
7700    {
7701      if (sets[i].rtl)
7702	{
7703	  rtx x = SET_DEST (sets[i].rtl);
7704
7705	  if (GET_CODE (x) != REG)
7706	    mention_regs (x);
7707	  else
7708	    {
7709	      /* We used to rely on all references to a register becoming
7710		 inaccessible when a register changes to a new quantity,
7711		 since that changes the hash code.  However, that is not
7712		 safe, since after NBUCKETS new quantities we get a
7713		 hash 'collision' of a register with its own invalid
7714		 entries.  And since SUBREGs have been changed not to
7715		 change their hash code with the hash code of the register,
7716		 it wouldn't work any longer at all.  So we have to check
7717		 for any invalid references lying around now.
7718		 This code is similar to the REG case in mention_regs,
7719		 but it knows that reg_tick has been incremented, and
7720		 it leaves reg_in_table as -1 .  */
7721	      register int regno = REGNO (x);
7722	      register int endregno
7723		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7724			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7725	      int i;
7726
7727	      for (i = regno; i < endregno; i++)
7728		{
7729		  if (REG_IN_TABLE (i) >= 0)
7730		    {
7731		      remove_invalid_refs (i);
7732		      REG_IN_TABLE (i) = -1;
7733		    }
7734		}
7735	    }
7736	}
7737    }
7738
7739  /* We may have just removed some of the src_elt's from the hash table.
7740     So replace each one with the current head of the same class.  */
7741
7742  for (i = 0; i < n_sets; i++)
7743    if (sets[i].rtl)
7744      {
7745	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7746	  /* If elt was removed, find current head of same class,
7747	     or 0 if nothing remains of that class.  */
7748	  {
7749	    register struct table_elt *elt = sets[i].src_elt;
7750
7751	    while (elt && elt->prev_same_value)
7752	      elt = elt->prev_same_value;
7753
7754	    while (elt && elt->first_same_value == 0)
7755	      elt = elt->next_same_value;
7756	    sets[i].src_elt = elt ? elt->first_same_value : 0;
7757	  }
7758      }
7759
7760  /* Now insert the destinations into their equivalence classes.  */
7761
7762  for (i = 0; i < n_sets; i++)
7763    if (sets[i].rtl)
7764      {
7765	register rtx dest = SET_DEST (sets[i].rtl);
7766	rtx inner_dest = sets[i].inner_dest;
7767	register struct table_elt *elt;
7768
7769	/* Don't record value if we are not supposed to risk allocating
7770	   floating-point values in registers that might be wider than
7771	   memory.  */
7772	if ((flag_float_store
7773	     && GET_CODE (dest) == MEM
7774	     && FLOAT_MODE_P (GET_MODE (dest)))
7775	    /* Don't record BLKmode values, because we don't know the
7776	       size of it, and can't be sure that other BLKmode values
7777	       have the same or smaller size.  */
7778	    || GET_MODE (dest) == BLKmode
7779	    /* Don't record values of destinations set inside a libcall block
7780	       since we might delete the libcall.  Things should have been set
7781	       up so we won't want to reuse such a value, but we play it safe
7782	       here.  */
7783	    || libcall_insn
7784	    /* If we didn't put a REG_EQUAL value or a source into the hash
7785	       table, there is no point is recording DEST.  */
7786	    || sets[i].src_elt == 0
7787	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7788	       or SIGN_EXTEND, don't record DEST since it can cause
7789	       some tracking to be wrong.
7790
7791	       ??? Think about this more later.  */
7792	    || (GET_CODE (dest) == SUBREG
7793		&& (GET_MODE_SIZE (GET_MODE (dest))
7794		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7795		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
7796		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7797	  continue;
7798
7799	/* STRICT_LOW_PART isn't part of the value BEING set,
7800	   and neither is the SUBREG inside it.
7801	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
7802	if (GET_CODE (dest) == STRICT_LOW_PART)
7803	  dest = SUBREG_REG (XEXP (dest, 0));
7804
7805	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7806	  /* Registers must also be inserted into chains for quantities.  */
7807	  if (insert_regs (dest, sets[i].src_elt, 1))
7808	    {
7809	      /* If `insert_regs' changes something, the hash code must be
7810		 recalculated.  */
7811	      rehash_using_reg (dest);
7812	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7813	    }
7814
7815	if (GET_CODE (inner_dest) == MEM
7816	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7817	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7818	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
7819	     Consider the case in which the address of the MEM is
7820	     passed to a function, which alters the MEM.  Then, if we
7821	     later use Y instead of the MEM we'll miss the update.  */
7822	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7823	else
7824	  elt = insert (dest, sets[i].src_elt,
7825			sets[i].dest_hash, GET_MODE (dest));
7826
7827	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7828			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7829			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7830							  0))));
7831
7832	if (elt->in_memory)
7833	  {
7834	    /* This implicitly assumes a whole struct
7835	       need not have MEM_IN_STRUCT_P.
7836	       But a whole struct is *supposed* to have MEM_IN_STRUCT_P.  */
7837	    elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7838			      || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7839	  }
7840
7841	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7842	   narrower than M2, and both M1 and M2 are the same number of words,
7843	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7844	   make that equivalence as well.
7845
7846	   However, BAR may have equivalences for which gen_lowpart_if_possible
7847	   will produce a simpler value than gen_lowpart_if_possible applied to
7848	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7849	   BAR's equivalences.  If we don't get a simplified form, make
7850	   the SUBREG.  It will not be used in an equivalence, but will
7851	   cause two similar assignments to be detected.
7852
7853	   Note the loop below will find SUBREG_REG (DEST) since we have
7854	   already entered SRC and DEST of the SET in the table.  */
7855
7856	if (GET_CODE (dest) == SUBREG
7857	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7858		 / UNITS_PER_WORD)
7859		== (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7860	    && (GET_MODE_SIZE (GET_MODE (dest))
7861		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7862	    && sets[i].src_elt != 0)
7863	  {
7864	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7865	    struct table_elt *elt, *classp = 0;
7866
7867	    for (elt = sets[i].src_elt->first_same_value; elt;
7868		 elt = elt->next_same_value)
7869	      {
7870		rtx new_src = 0;
7871		unsigned src_hash;
7872		struct table_elt *src_elt;
7873
7874		/* Ignore invalid entries.  */
7875		if (GET_CODE (elt->exp) != REG
7876		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7877		  continue;
7878
7879		new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7880		if (new_src == 0)
7881		  new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7882
7883		src_hash = HASH (new_src, new_mode);
7884		src_elt = lookup (new_src, src_hash, new_mode);
7885
7886		/* Put the new source in the hash table is if isn't
7887		   already.  */
7888		if (src_elt == 0)
7889		  {
7890		    if (insert_regs (new_src, classp, 0))
7891		      {
7892			rehash_using_reg (new_src);
7893			src_hash = HASH (new_src, new_mode);
7894		      }
7895		    src_elt = insert (new_src, classp, src_hash, new_mode);
7896		    src_elt->in_memory = elt->in_memory;
7897		    src_elt->in_struct = elt->in_struct;
7898		  }
7899		else if (classp && classp != src_elt->first_same_value)
7900		  /* Show that two things that we've seen before are
7901		     actually the same.  */
7902		  merge_equiv_classes (src_elt, classp);
7903
7904		classp = src_elt->first_same_value;
7905		/* Ignore invalid entries.  */
7906		while (classp
7907		       && GET_CODE (classp->exp) != REG
7908		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7909		  classp = classp->next_same_value;
7910	      }
7911	  }
7912      }
7913
7914  /* Special handling for (set REG0 REG1)
7915     where REG0 is the "cheapest", cheaper than REG1.
7916     After cse, REG1 will probably not be used in the sequel,
7917     so (if easily done) change this insn to (set REG1 REG0) and
7918     replace REG1 with REG0 in the previous insn that computed their value.
7919     Then REG1 will become a dead store and won't cloud the situation
7920     for later optimizations.
7921
7922     Do not make this change if REG1 is a hard register, because it will
7923     then be used in the sequel and we may be changing a two-operand insn
7924     into a three-operand insn.
7925
7926     Also do not do this if we are operating on a copy of INSN.
7927
7928     Also don't do this if INSN ends a libcall; this would cause an unrelated
7929     register to be set in the middle of a libcall, and we then get bad code
7930     if the libcall is deleted.  */
7931
7932  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7933      && NEXT_INSN (PREV_INSN (insn)) == insn
7934      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7935      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7936      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7937      && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
7938	  == REGNO (SET_DEST (sets[0].rtl)))
7939      && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7940    {
7941      rtx prev = PREV_INSN (insn);
7942      while (prev && GET_CODE (prev) == NOTE)
7943	prev = PREV_INSN (prev);
7944
7945      if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7946	  && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7947	{
7948	  rtx dest = SET_DEST (sets[0].rtl);
7949	  rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7950
7951	  validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7952	  validate_change (insn, & SET_DEST (sets[0].rtl),
7953			   SET_SRC (sets[0].rtl), 1);
7954	  validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7955	  apply_change_group ();
7956
7957	  /* If REG1 was equivalent to a constant, REG0 is not.  */
7958	  if (note)
7959	    PUT_REG_NOTE_KIND (note, REG_EQUAL);
7960
7961	  /* If there was a REG_WAS_0 note on PREV, remove it.  Move
7962	     any REG_WAS_0 note on INSN to PREV.  */
7963	  note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7964	  if (note)
7965	    remove_note (prev, note);
7966
7967	  note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7968	  if (note)
7969	    {
7970	      remove_note (insn, note);
7971	      XEXP (note, 1) = REG_NOTES (prev);
7972	      REG_NOTES (prev) = note;
7973	    }
7974
7975	  /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7976	     then we must delete it, because the value in REG0 has changed.  */
7977	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7978	  if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7979	    remove_note (insn, note);
7980	}
7981    }
7982
7983  /* If this is a conditional jump insn, record any known equivalences due to
7984     the condition being tested.  */
7985
7986  last_jump_equiv_class = 0;
7987  if (GET_CODE (insn) == JUMP_INSN
7988      && n_sets == 1 && GET_CODE (x) == SET
7989      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7990    record_jump_equiv (insn, 0);
7991
7992#ifdef HAVE_cc0
7993  /* If the previous insn set CC0 and this insn no longer references CC0,
7994     delete the previous insn.  Here we use the fact that nothing expects CC0
7995     to be valid over an insn, which is true until the final pass.  */
7996  if (prev_insn && GET_CODE (prev_insn) == INSN
7997      && (tem = single_set (prev_insn)) != 0
7998      && SET_DEST (tem) == cc0_rtx
7999      && ! reg_mentioned_p (cc0_rtx, x))
8000    {
8001      PUT_CODE (prev_insn, NOTE);
8002      NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
8003      NOTE_SOURCE_FILE (prev_insn) = 0;
8004    }
8005
8006  prev_insn_cc0 = this_insn_cc0;
8007  prev_insn_cc0_mode = this_insn_cc0_mode;
8008#endif
8009
8010  prev_insn = insn;
8011}
8012
8013/* Remove from the hash table all expressions that reference memory.  */
8014static void
8015invalidate_memory ()
8016{
8017  register int i;
8018  register struct table_elt *p, *next;
8019
8020  for (i = 0; i < NBUCKETS; i++)
8021    for (p = table[i]; p; p = next)
8022      {
8023	next = p->next_same_hash;
8024	if (p->in_memory)
8025	  remove_from_table (p, i);
8026      }
8027}
8028
8029/* XXX ??? The name of this function bears little resemblance to
8030   what this function actually does.  FIXME.  */
8031static int
8032note_mem_written (addr)
8033     register rtx addr;
8034{
8035  /* Pushing or popping the stack invalidates just the stack pointer.  */
8036  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
8037       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
8038      && GET_CODE (XEXP (addr, 0)) == REG
8039      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
8040    {
8041      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
8042	REG_TICK (STACK_POINTER_REGNUM)++;
8043
8044      /* This should be *very* rare.  */
8045      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
8046	invalidate (stack_pointer_rtx, VOIDmode);
8047      return 1;
8048    }
8049  return 0;
8050}
8051
8052/* Perform invalidation on the basis of everything about an insn
8053   except for invalidating the actual places that are SET in it.
8054   This includes the places CLOBBERed, and anything that might
8055   alias with something that is SET or CLOBBERed.
8056
8057   X is the pattern of the insn.  */
8058
8059static void
8060invalidate_from_clobbers (x)
8061     rtx x;
8062{
8063  if (GET_CODE (x) == CLOBBER)
8064    {
8065      rtx ref = XEXP (x, 0);
8066      if (ref)
8067	{
8068	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8069	      || GET_CODE (ref) == MEM)
8070	    invalidate (ref, VOIDmode);
8071	  else if (GET_CODE (ref) == STRICT_LOW_PART
8072		   || GET_CODE (ref) == ZERO_EXTRACT)
8073	    invalidate (XEXP (ref, 0), GET_MODE (ref));
8074	}
8075    }
8076  else if (GET_CODE (x) == PARALLEL)
8077    {
8078      register int i;
8079      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8080	{
8081	  register rtx y = XVECEXP (x, 0, i);
8082	  if (GET_CODE (y) == CLOBBER)
8083	    {
8084	      rtx ref = XEXP (y, 0);
8085	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8086		  || GET_CODE (ref) == MEM)
8087		invalidate (ref, VOIDmode);
8088	      else if (GET_CODE (ref) == STRICT_LOW_PART
8089		       || GET_CODE (ref) == ZERO_EXTRACT)
8090		invalidate (XEXP (ref, 0), GET_MODE (ref));
8091	    }
8092	}
8093    }
8094}
8095
8096/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
8097   and replace any registers in them with either an equivalent constant
8098   or the canonical form of the register.  If we are inside an address,
8099   only do this if the address remains valid.
8100
8101   OBJECT is 0 except when within a MEM in which case it is the MEM.
8102
8103   Return the replacement for X.  */
8104
8105static rtx
8106cse_process_notes (x, object)
8107     rtx x;
8108     rtx object;
8109{
8110  enum rtx_code code = GET_CODE (x);
8111  char *fmt = GET_RTX_FORMAT (code);
8112  int i;
8113
8114  switch (code)
8115    {
8116    case CONST_INT:
8117    case CONST:
8118    case SYMBOL_REF:
8119    case LABEL_REF:
8120    case CONST_DOUBLE:
8121    case PC:
8122    case CC0:
8123    case LO_SUM:
8124      return x;
8125
8126    case MEM:
8127      XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
8128      return x;
8129
8130    case EXPR_LIST:
8131    case INSN_LIST:
8132      if (REG_NOTE_KIND (x) == REG_EQUAL)
8133	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
8134      if (XEXP (x, 1))
8135	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
8136      return x;
8137
8138    case SIGN_EXTEND:
8139    case ZERO_EXTEND:
8140    case SUBREG:
8141      {
8142	rtx new = cse_process_notes (XEXP (x, 0), object);
8143	/* We don't substitute VOIDmode constants into these rtx,
8144	   since they would impede folding.  */
8145	if (GET_MODE (new) != VOIDmode)
8146	  validate_change (object, &XEXP (x, 0), new, 0);
8147	return x;
8148      }
8149
8150    case REG:
8151      i = REG_QTY (REGNO (x));
8152
8153      /* Return a constant or a constant register.  */
8154      if (REGNO_QTY_VALID_P (REGNO (x))
8155	  && qty_const[i] != 0
8156	  && (CONSTANT_P (qty_const[i])
8157	      || GET_CODE (qty_const[i]) == REG))
8158	{
8159	  rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
8160	  if (new)
8161	    return new;
8162	}
8163
8164      /* Otherwise, canonicalize this register.  */
8165      return canon_reg (x, NULL_RTX);
8166
8167    default:
8168      break;
8169    }
8170
8171  for (i = 0; i < GET_RTX_LENGTH (code); i++)
8172    if (fmt[i] == 'e')
8173      validate_change (object, &XEXP (x, i),
8174		       cse_process_notes (XEXP (x, i), object), 0);
8175
8176  return x;
8177}
8178
/* Find common subexpressions between the end test of a loop and the beginning
   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.

   Often we have a loop where an expression in the exit test is used
   in the body of the loop.  For example "while (*p) *q++ = *p++;".
   Because of the way we duplicate the loop exit test in front of the loop,
   however, we don't detect that common subexpression.  This will be caught
   when global cse is implemented, but this is a quite common case.

   This function handles the most common cases of these common expressions.
   It is called after we have processed the basic block ending with the
   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
   jumps to a label used only once.  */

static void
cse_around_loop (loop_start)
     rtx loop_start;
{
  rtx insn;
  int i;
  struct table_elt *p;

  /* If the jump at the end of the loop doesn't go to the start, we don't
     do anything.  */
  /* Scan backward from LOOP_START, skipping only source line-number
     notes (NOTE_LINE_NUMBER >= 0); we must land directly on the
     NOTE_INSN_LOOP_BEG note, else give up.  */
  for (insn = PREV_INSN (loop_start);
       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
       insn = PREV_INSN (insn))
    ;

  if (insn == 0
      || GET_CODE (insn) != NOTE
      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
    return;

  /* If the last insn of the loop (the end test) was an NE comparison,
     we will interpret it as an EQ comparison, since we fell through
     the loop.  Any equivalences resulting from that comparison are
     therefore not valid and must be invalidated.  */
  if (last_jump_equiv_class)
    for (p = last_jump_equiv_class->first_same_value; p;
	 p = p->next_same_value)
      {
        if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
	    || (GET_CODE (p->exp) == SUBREG
	        && GET_CODE (SUBREG_REG (p->exp)) == REG))
	  invalidate (p->exp, VOIDmode);
        else if (GET_CODE (p->exp) == STRICT_LOW_PART
	         || GET_CODE (p->exp) == ZERO_EXTRACT)
	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
      }

  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).

     The only thing we do with SET_DEST is invalidate entries, so we
     can safely process each SET in order.  It is slightly less efficient
     to do so, but we only want to handle the most common cases.

     The gen_move_insn call in cse_set_around_loop may create new pseudos.
     These pseudos won't have valid entries in any of the tables indexed
     by register number, such as reg_qty.  We avoid out-of-range array
     accesses by not processing any instructions created after cse started.  */

  /* NOTE(review): INSN is not checked against 0 here; this relies on a
     CALL_INSN, CODE_LABEL, or NOTE_INSN_LOOP_END always appearing before
     the end of the insn chain -- confirm against callers.  */
  for (insn = NEXT_INSN (loop_start);
       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
       && INSN_UID (insn) < max_insn_uid
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      /* A single SET or CLOBBER is handled directly; for a PARALLEL,
	 process each SET or CLOBBER element in turn.  */
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	  && (GET_CODE (PATTERN (insn)) == SET
	      || GET_CODE (PATTERN (insn)) == CLOBBER))
	cse_set_around_loop (PATTERN (insn), insn, loop_start);
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	       && GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
				 loop_start);
    }
}
8262
/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
   since they are done elsewhere.  This function is called via note_stores.

   DEST is the place being stored into; SET is the SET or CLOBBER rtx
   that contains it.  We remove DEST from the hash table (or flush all
   memory knowledge when the store could alias anything) so that stale
   equivalences from before the skipped block are not used after it.  */

static void
invalidate_skipped_set (dest, set)
     rtx set;
     rtx dest;
{
  enum rtx_code code = GET_CODE (dest);

  if (code == MEM
      && ! note_mem_written (dest)	/* If this is not a stack push ... */
      /* There are times when an address can appear varying and be a PLUS
	 during this scan when it would be a fixed address were we to know
	 the proper equivalences.  So invalidate all memory if there is
	 a BLKmode or nonscalar memory reference or a reference to a
	 variable address.  */
      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
	  || cse_rtx_varies_p (XEXP (dest, 0))))
    {
      /* Too imprecise to invalidate selectively; forget all MEMs.  */
      invalidate_memory ();
      return;
    }

  /* CLOBBERs are handled by invalidate_from_clobbers; stores to the
     condition code or the program counter need no table maintenance.  */
  if (GET_CODE (set) == CLOBBER
#ifdef HAVE_cc0
      || dest == cc0_rtx
#endif
      || dest == pc_rtx)
    return;

  /* For a partial store, invalidate the full underlying object;
     otherwise invalidate the stored-to object itself.  */
  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
    invalidate (XEXP (dest, 0), GET_MODE (dest));
  else if (code == REG || code == SUBREG || code == MEM)
    invalidate (dest, VOIDmode);
}
8299
8300/* Invalidate all insns from START up to the end of the function or the
8301   next label.  This called when we wish to CSE around a block that is
8302   conditionally executed.  */
8303
8304static void
8305invalidate_skipped_block (start)
8306     rtx start;
8307{
8308  rtx insn;
8309
8310  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8311       insn = NEXT_INSN (insn))
8312    {
8313      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8314	continue;
8315
8316      if (GET_CODE (insn) == CALL_INSN)
8317	{
8318	  if (! CONST_CALL_P (insn))
8319	    invalidate_memory ();
8320	  invalidate_for_call ();
8321	}
8322
8323      invalidate_from_clobbers (PATTERN (insn));
8324      note_stores (PATTERN (insn), invalidate_skipped_set);
8325    }
8326}
8327
/* Used for communication between cse_check_loop_start and
   cse_set_around_loop; contains a value to be checked for modification.
   It is cleared (set to 0) by cse_check_loop_start when a store that
   might modify the value is seen.  */

static rtx cse_check_loop_start_value;
8332
8333/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8334   indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0.  */
8335
8336static void
8337cse_check_loop_start (x, set)
8338     rtx x;
8339     rtx set ATTRIBUTE_UNUSED;
8340{
8341  if (cse_check_loop_start_value == 0
8342      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8343    return;
8344
8345  if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8346      || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8347    cse_check_loop_start_value = 0;
8348}
8349
/* X is a SET or CLOBBER contained in INSN that was found near the start of
   a loop that starts with the label at LOOP_START.

   If X is a SET, we see if its SET_SRC is currently in our hash table.
   If so, we see if it has a value equal to some register used only in the
   loop exit code (as marked by jump.c).

   If those two conditions are true, we search backwards from the start of
   the loop to see if that same value was loaded into a register that still
   retains its value at the start of the loop.

   If so, we insert an insn after the load to copy the destination of that
   load into the equivalent register and (try to) replace our SET_SRC with that
   register.

   In any event, we invalidate whatever this SET or CLOBBER modifies.  */

static void
cse_set_around_loop (x, insn, loop_start)
     rtx x;
     rtx insn;
     rtx loop_start;
{
  struct table_elt *src_elt;

  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
     are setting PC or CC0 or whose SET_SRC is already a register.  */
  if (GET_CODE (x) == SET
      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
      && GET_CODE (SET_SRC (x)) != REG)
    {
      src_elt = lookup (SET_SRC (x),
			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
			GET_MODE (SET_DEST (x)));

      if (src_elt)
	/* Walk the equivalence class looking for a register that is used
	   only in the loop-exit test and is cheaper than SET_SRC (x).  */
	for (src_elt = src_elt->first_same_value; src_elt;
	     src_elt = src_elt->next_same_value)
	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
	      && COST (src_elt->exp) < COST (SET_SRC (x)))
	    {
	      rtx p, set;

	      /* Look for an insn in front of LOOP_START that sets
		 something in the desired mode to SET_SRC (x) before we hit
		 a label or CALL_INSN.  */

	      for (p = prev_nonnote_insn (loop_start);
		   p && GET_CODE (p) != CALL_INSN
		   && GET_CODE (p) != CODE_LABEL;
		   p = prev_nonnote_insn  (p))
		if ((set = single_set (p)) != 0
		    && GET_CODE (SET_DEST (set)) == REG
		    && GET_MODE (SET_DEST (set)) == src_elt->mode
		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
		  {
		    /* We now have to ensure that nothing between P
		       and LOOP_START modified anything referenced in
		       SET_SRC (x).  We know that nothing within the loop
		       can modify it, or we would have invalidated it in
		       the hash table.  */
		    rtx q;

		    cse_check_loop_start_value = SET_SRC (x);
		    for (q = p; q != loop_start; q = NEXT_INSN (q))
		      if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
			note_stores (PATTERN (q), cse_check_loop_start);

		    /* If nothing was changed and we can replace our
		       SET_SRC, add an insn after P to copy its destination
		       to what we will be replacing SET_SRC with.  */
		    if (cse_check_loop_start_value
			&& validate_change (insn, &SET_SRC (x),
					    src_elt->exp, 0))
		      {
			/* If this creates new pseudos, this is unsafe,
			   because the regno of new pseudo is unsuitable
			   to index into reg_qty when cse_insn processes
			   the new insn.  Therefore, if a new pseudo was
			   created, discard this optimization.  */
			int nregs = max_reg_num ();
			rtx move
			  = gen_move_insn (src_elt->exp, SET_DEST (set));
			if (nregs != max_reg_num ())
			  {
			    /* Undo the SET_SRC replacement made above;
			       failure to undo should be impossible.  */
			    if (! validate_change (insn, &SET_SRC (x),
						   SET_SRC (set), 0))
			      abort ();
			  }
			else
			  emit_insn_after (move, p);
		      }
		    break;
		  }
	    }
    }

  /* Now invalidate anything modified by X.  */
  note_mem_written (SET_DEST (x));

  /* See comment on similar code in cse_insn for explanation of these tests.  */
  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
      || GET_CODE (SET_DEST (x)) == MEM)
    invalidate (SET_DEST (x), VOIDmode);
  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
}
8458
/* Find the end of INSN's basic block and return its range,
   the total number of SETs in all the insns of the block, the last insn of the
   block, and the branch path.

   The branch path indicates which branches should be followed.  If a non-zero
   path size is specified, the block should be rescanned and a different set
   of branches will be taken.  The branch path is only used if
   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.

   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
   used to describe the block.  It is filled in with the information about
   the current block.  The incoming structure's branch path, if any, is used
   to construct the output branch path.  */

void
cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
     rtx insn;
     struct cse_basic_block_data *data;
     int follow_jumps;
     int after_loop;
     int skip_blocks;
{
  rtx p = insn, q;
  int nsets = 0;
  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
  /* NEXT is the first real insn of the block; used to detect walking
     around an always-executed loop.  */
  rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
  int path_size = data->path_size;
  int path_entry = 0;
  int i;

  /* Update the previous branch path, if any.  If the last branch was
     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
     shorten the path by one and look at the previous branch.  We know that
     at least one branch must have been taken if PATH_SIZE is non-zero.  */
  while (path_size > 0)
    {
      if (data->path[path_size - 1].status != NOT_TAKEN)
	{
	  data->path[path_size - 1].status = NOT_TAKEN;
	  break;
	}
      else
	path_size--;
    }

  /* Scan to end of this basic block.  */
  while (p && GET_CODE (p) != CODE_LABEL)
    {
      /* Don't cse out the end of a loop.  This makes a difference
	 only for the unusual loops that always execute at least once;
	 all other loops have labels there so we will stop in any case.
	 Cse'ing out the end of the loop is dangerous because it
	 might cause an invariant expression inside the loop
	 to be reused after the end of the loop.  This would make it
	 hard to move the expression out of the loop in loop.c,
	 especially if it is one of several equivalent expressions
	 and loop.c would like to eliminate it.

	 If we are running after loop.c has finished, we can ignore
	 the NOTE_INSN_LOOP_END.  */

      if (! after_loop && GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
	break;

      /* Don't cse over a call to setjmp; on some machines (eg vax)
	 the regs restored by the longjmp come from
	 a later time than the setjmp.  */
      if (GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
	break;

      /* A PARALLEL can have lots of SETs in it,
	 especially if it is really an ASM_OPERANDS.  */
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && GET_CODE (PATTERN (p)) == PARALLEL)
	nsets += XVECLEN (PATTERN (p), 0);
      else if (GET_CODE (p) != NOTE)
	nsets += 1;

      /* Ignore insns made by CSE; they cannot affect the boundaries of
	 the basic block.  */

      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
	high_cuid = INSN_CUID (p);
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
	low_cuid = INSN_CUID (p);

      /* See if this insn is in our branch path.  If it is and we are to
	 take it, do so.  */
      if (path_entry < path_size && data->path[path_entry].branch == p)
	{
	  if (data->path[path_entry].status != NOT_TAKEN)
	    p = JUMP_LABEL (p);

	  /* Point to next entry in path, if any.  */
	  path_entry++;
	}

      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
	 was specified, we haven't reached our maximum path length, there are
	 insns following the target of the jump, this is the only use of the
	 jump label, and the target label is preceded by a BARRIER.

	 Alternatively, we can follow the jump if it branches around a
	 block of code and there are no other branches into the block.
	 In this case invalidate_skipped_block will be called to invalidate any
	 registers set in the block when following the jump.  */

      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
	       && GET_CODE (p) == JUMP_INSN
	       && GET_CODE (PATTERN (p)) == SET
	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
	       && JUMP_LABEL (p) != 0
	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
	{
	  /* Scan backwards from the jump target past ignorable notes and
	     unused labels; Q ends up at the first "interesting" insn.  */
	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
	    if ((GET_CODE (q) != NOTE
	         || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
	         || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
	        && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
	      break;

	  /* If we ran into a BARRIER, this code is an extension of the
	     basic block when the branch is taken.  */
	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
	    {
	      /* Don't allow ourself to keep walking around an
		 always-executed loop.  */
	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      /* Similarly, don't put a branch in our path more than once.  */
	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      data->path[path_entry].branch = p;
	      data->path[path_entry++].status = TAKEN;

	      /* This branch now ends our path.  It was possible that we
		 didn't see this branch the last time around (when the
		 insn in front of the target was a JUMP_INSN that was
		 turned into a no-op).  */
	      path_size = path_entry;

	      p = JUMP_LABEL (p);
	      /* Mark block so we won't scan it again later.  */
	      PUT_MODE (NEXT_INSN (p), QImode);
	    }
	  /* Detect a branch around a block of code.  */
	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
	    {
	      register rtx tmp;

	      /* As above: don't loop around an always-executed loop.  */
	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      /* Don't record the same branch in the path twice.  */
	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      /* This is no_labels_between_p (p, q) with an added check for
		 reaching the end of a function (in case Q precedes P).  */
	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
		if (GET_CODE (tmp) == CODE_LABEL)
		  break;

	      if (tmp == q)
		{
		  data->path[path_entry].branch = p;
		  data->path[path_entry++].status = AROUND;

		  path_size = path_entry;

		  p = JUMP_LABEL (p);
		  /* Mark block so we won't scan it again later.  */
		  PUT_MODE (NEXT_INSN (p), QImode);
		}
	    }
	}
      p = NEXT_INSN (p);
    }

  data->low_cuid = low_cuid;
  data->high_cuid = high_cuid;
  data->nsets = nsets;
  data->last = p;

  /* If all jumps in the path are not taken, set our path length to zero
     so a rescan won't be done.  */
  for (i = path_size - 1; i >= 0; i--)
    if (data->path[i].status != NOT_TAKEN)
      break;

  if (i == -1)
    data->path_size = 0;
  else
    data->path_size = path_size;

  /* End the current branch path.  */
  data->path[path_size].branch = 0;
}
8675
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   AFTER_LOOP is 1 if this is the cse call done after loop optimization
   (only if -frerun-cse-after-loop).

   FILE, if non-null, receives per-block progress messages.

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */

int
cse_main (f, nregs, after_loop, file)
     rtx f;
     int nregs;
     int after_loop;
     FILE *file;
{
  struct cse_basic_block_data val;
  register rtx insn = f;
  register int i;

  cse_jumps_altered = 0;
  recorded_label_ref = 0;
  constant_pool_entries_cost = 0;
  val.path_size = 0;

  init_recog ();
  init_alias_analysis ();

  max_reg = nregs;

  max_insn_uid = get_max_uid ();

  reg_next_eqv = (int *) alloca (nregs * sizeof (int));
  reg_prev_eqv = (int *) alloca (nregs * sizeof (int));

#ifdef LOAD_EXTEND_OP

  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
     and change the code and mode as appropriate.  */
  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
#endif

  /* Discard all the free elements of the previous function
     since they are allocated in the temporary obstack.  */
  bzero ((char *) table, sizeof table);
  free_element_chain = 0;
  n_elements_made = 0;

  /* Find the largest uid.  */

  max_uid = get_max_uid ();
  uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
  bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
     between two insns is not affected by -g.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE
	  || NOTE_LINE_NUMBER (insn) < 0)
	INSN_CUID (insn) = ++i;
      else
	/* Give a line number note the same cuid as preceding insn.  */
	INSN_CUID (insn) = i;
    }

  /* Initialize which registers are clobbered by calls.  */

  CLEAR_HARD_REG_SET (regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((call_used_regs[i]
	 /* Used to check !fixed_regs[i] here, but that isn't safe;
	    fixed regs are still call-clobbered, and sched can get
	    confused if they can "live across calls".

	    The frame pointer is always preserved across calls.  The arg
	    pointer is if it is fixed.  The stack pointer usually is, unless
	    RETURN_POPS_ARGS, in which case an explicit CLOBBER
	    will be present.  If we are generating PIC code, the PIC offset
	    table register is preserved across calls.  */

	 && i != STACK_POINTER_REGNUM
	 && i != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
	 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
	 )
	|| global_regs[i])
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);

  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
			      flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.
	 A QImode on the block's first insn is the "already scanned" mark
	 set by cse_end_of_basic_block.  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
	{
	  PUT_MODE (insn, VOIDmode);
	  insn = (val.last ? NEXT_INSN (val.last) : 0);
	  val.path_size = 0;
	  continue;
	}

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
		 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
	 past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
	max_qty = 500;

      max_qty += max_reg;

      /* If this basic block is being extended by following certain jumps,
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
         Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
        cse_basic_block (insn, val.last, val.path, 0);
      else
	{
	  int old_cse_jumps_altered = cse_jumps_altered;
	  rtx temp;

	  /* When cse changes a conditional jump to an unconditional
	     jump, we want to reprocess the block, since it will give
	     us a new branch path to investigate.  */
	  cse_jumps_altered = 0;
	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
	  if (cse_jumps_altered == 0
	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
	    insn = temp;

	  cse_jumps_altered |= old_cse_jumps_altered;
	}

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  /* Tell refers_to_mem_p that qty_const info is not available.  */
  qty_const = 0;

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  return cse_jumps_altered || recorded_label_ref;
}
8847
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.

   Returns the insn after the block (or after any block extension scanned
   here), or 0 if we ran off the end of the function.  */

static rtx
cse_basic_block (from, to, next_branch, around_loop)
     register rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  register rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;

  /* Each of these arrays is undefined before max_reg, so only allocate
     the space actually needed and adjust the start below.  */

  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_mode= (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_comparison_code
    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));

  /* Bias the array pointers so they can be indexed by qty numbers,
     which start at max_reg.  */
  qty_first_reg -= max_reg;
  qty_last_reg -= max_reg;
  qty_mode -= max_reg;
  qty_const -= max_reg;
  qty_const_insn -= max_reg;
  qty_comparison_code -= max_reg;
  qty_comparison_qty -= max_reg;
  qty_comparison_const -= max_reg;

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code = GET_CODE (insn);

      /* If we have processed 1,000 insns, flush the hash table to
	 avoid extreme quadratic behavior.  We must not include NOTEs
	 in the count since there may be more of them when generating
	 debugging information.  If we clear the table at different
	 times, code generated with -g -O might be different than code
	 generated with -O but not -g.

	 ??? This is a real kludge and needs to be done some other way.
	 Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
	{
	  flush_hash_table ();
	  num_insns = 0;
	}

      /* See if this is a branch that is part of the path.  If so, and it is
	 to be taken, do so.  */
      if (next_branch->branch == insn)
	{
	  enum taken status = next_branch++->status;
	  if (status != NOT_TAKEN)
	    {
	      if (status == TAKEN)
		record_jump_equiv (insn, 1);
	      else
		invalidate_skipped_block (NEXT_INSN (insn));

	      /* Set the last insn as the jump insn; it doesn't affect cc0.
		 Then follow this branch.  */
#ifdef HAVE_cc0
	      prev_insn_cc0 = 0;
#endif
	      prev_insn = insn;
	      insn = JUMP_LABEL (insn);
	      continue;
	    }
	}

      /* Clear the "already scanned" mark left by cse_end_of_basic_block.  */
      if (GET_MODE (insn) == QImode)
	PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx p;

	  /* Process notes first so we have all notes in canonical forms when
	     looking for duplicate operations.  */

	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

	  /* Track when we are inside in LIBCALL block.  Inside such a block,
	     we do not want to record destinations.  The last insn of a
	     LIBCALL block is not considered to be part of the block, since
	     its destination is the result of the block and hence should be
	     recorded.  */

	  if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
	    libcall_insn = XEXP (p, 0);
	  else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    libcall_insn = NULL_RTX;

	  cse_insn (insn, libcall_insn);
	}

      /* If INSN is now an unconditional jump, skip to the end of our
	 basic block by pretending that we just did the last insn in the
	 basic block.  If we are jumping to the end of our block, show
	 that we can have one usage of TO.  */

      if (simplejump_p (insn))
	{
	  if (to == 0)
	    return 0;

	  if (JUMP_LABEL (insn) == to)
	    to_usage = 1;

	  /* Maybe TO was deleted because the jump is unconditional.
	     If so, there is nothing left in this basic block.  */
	  /* ??? Perhaps it would be smarter to set TO
	     to whatever follows this insn,
	     and pretend the basic block had always ended here.  */
	  if (INSN_DELETED_P (to))
	    break;

	  insn = PREV_INSN (to);
	}

      /* See if it is ok to keep on going past the label
	 which used to end our basic block.  Remember that we incremented
	 the count of that label, so we decrement it here.  If we made
	 a jump unconditional, TO_USAGE will be one; in that case, we don't
	 want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
	{
	  struct cse_basic_block_data val;
	  rtx prev;

	  insn = NEXT_INSN (to);

	  if (LABEL_NUSES (to) == 0)
	    insn = delete_insn (to);

	  /* If TO was the last insn in the function, we are done.  */
	  if (insn == 0)
	    return 0;

	  /* If TO was preceded by a BARRIER we are done with this block
	     because it has no continuation.  */
	  prev = prev_nonnote_insn (to);
	  if (prev && GET_CODE (prev) == BARRIER)
	    return insn;

	  /* Find the end of the following block.  Note that we won't be
	     following branches in this case.  */
	  to_usage = 0;
	  val.path_size = 0;
	  cse_end_of_basic_block (insn, &val, 0, 0, 0);

	  /* If the tables we allocated have enough space left
	     to handle all the SETs in the next basic block,
	     continue through it.  Otherwise, return,
	     and that block will be scanned individually.  */
	  if (val.nsets * 2 + next_qty > max_qty)
	    break;

	  cse_basic_block_start = val.low_cuid;
	  cse_basic_block_end = val.high_cuid;
	  to = val.last;

	  /* Prevent TO from being deleted if it is a label.  */
	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
	    ++LABEL_NUSES (to);

	  /* Back up so we process the first insn in the extension.  */
	  insn = PREV_INSN (insn);
	}
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  return to ? NEXT_INSN (to) : 0;
}
9060
9061/* Count the number of times registers are used (not set) in X.
9062   COUNTS is an array in which we accumulate the count, INCR is how much
9063   we count each register usage.
9064
9065   Don't count a usage of DEST, which is the SET_DEST of a SET which
9066   contains X in its SET_SRC.  This is because such a SET does not
9067   modify the liveness of DEST.  */
9068
9069static void
9070count_reg_usage (x, counts, dest, incr)
9071     rtx x;
9072     int *counts;
9073     rtx dest;
9074     int incr;
9075{
9076  enum rtx_code code;
9077  char *fmt;
9078  int i, j;
9079
9080  if (x == 0)
9081    return;
9082
9083  switch (code = GET_CODE (x))
9084    {
9085    case REG:
9086      if (x != dest)
9087	counts[REGNO (x)] += incr;
9088      return;
9089
9090    case PC:
9091    case CC0:
9092    case CONST:
9093    case CONST_INT:
9094    case CONST_DOUBLE:
9095    case SYMBOL_REF:
9096    case LABEL_REF:
9097      return;
9098
9099    case CLOBBER:
9100      /* If we are clobbering a MEM, mark any registers inside the address
9101         as being used.  */
9102      if (GET_CODE (XEXP (x, 0)) == MEM)
9103	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
9104      return;
9105
9106    case SET:
9107      /* Unless we are setting a REG, count everything in SET_DEST.  */
9108      if (GET_CODE (SET_DEST (x)) != REG)
9109	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
9110
9111      /* If SRC has side-effects, then we can't delete this insn, so the
9112	 usage of SET_DEST inside SRC counts.
9113
9114	 ??? Strictly-speaking, we might be preserving this insn
9115	 because some other SET has side-effects, but that's hard
9116	 to do and can't happen now.  */
9117      count_reg_usage (SET_SRC (x), counts,
9118		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
9119		       incr);
9120      return;
9121
9122    case CALL_INSN:
9123      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
9124
9125      /* ... falls through ...  */
9126    case INSN:
9127    case JUMP_INSN:
9128      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
9129
9130      /* Things used in a REG_EQUAL note aren't dead since loop may try to
9131	 use them.  */
9132
9133      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
9134      return;
9135
9136    case EXPR_LIST:
9137    case INSN_LIST:
9138      if (REG_NOTE_KIND (x) == REG_EQUAL
9139	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
9140	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
9141      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
9142      return;
9143
9144    default:
9145      break;
9146    }
9147
9148  fmt = GET_RTX_FORMAT (code);
9149  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9150    {
9151      if (fmt[i] == 'e')
9152	count_reg_usage (XEXP (x, i), counts, dest, incr);
9153      else if (fmt[i] == 'E')
9154	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9155	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
9156    }
9157}
9158
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.

   INSNS is the start of the insn chain to scan; NREG is the number of
   registers, used to size the per-register usage-count table.  */

void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  /* counts[R] is the number of remaining uses of register R, maintained
     incrementally as insns are deleted below.  */
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  /* Libcall-block state for the backward scan: IN_LIBCALL is set while we
     are between a REG_RETVAL note (seen first, since we go backwards) and
     the matching REG_LIBCALL note; DEAD_LIBCALL means the whole current
     libcall block has been proved removable.  */
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero ((char *) counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.  */
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      /* Fetch the predecessor now, before INSN is possibly deleted.  */
      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = 0;

	  /* See if there's a REG_EQUAL note on this insn and try to
	     replace the source with the REG_EQUAL expression.

	     We assume that insns with REG_RETVALs can only be reg->reg
	     copies at this point.  */
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (note)
	    {
	      rtx set = single_set (insn);
	      if (set
		  && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
		{
		  /* The result insn now stands on its own, so drop its
		     REG_RETVAL note and mark the rest of the libcall body
		     deletable as we continue scanning back through it.  */
		  remove_note (insn,
			       find_reg_note (insn, REG_RETVAL, NULL_RTX));
		  dead_libcall = 1;
		}
	    }
	}
      else if (in_libcall)
	/* Inside a libcall body: keep the insn unless the whole block was
	   marked dead above.  */
	live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
	{
	  /* A no-op copy of a register to itself is dead.  */
	  if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
	    ;

#ifdef HAVE_cc0
	  /* A set of cc0 is dead if the next real insn doesn't look at
	     cc0 (or there is no next insn).  */
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
		   && ! side_effects_p (SET_SRC (PATTERN (insn)))
		   && ((tem = next_nonnote_insn (insn)) == 0
		       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
		       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
	    ;
#endif
	  /* Otherwise the set is live unless it stores a pseudo register
	     with a zero use count and the source has no side effects.
	     Stores to hard registers are always considered live.  */
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
		   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
		   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
		   || side_effects_p (SET_SRC (PATTERN (insn))))
	    live_insn = 1;
	}
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	/* A PARALLEL is dead only if each SET in it passes the same tests
	   as above and every other element is a CLOBBER or USE.  */
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  {
	    rtx elt = XVECEXP (PATTERN (insn), 0, i);

	    if (GET_CODE (elt) == SET)
	      {
		if (GET_CODE (SET_DEST (elt)) == REG
		    && SET_DEST (elt) == SET_SRC (elt))
		  ;

#ifdef HAVE_cc0
		else if (GET_CODE (SET_DEST (elt)) == CC0
			 && ! side_effects_p (SET_SRC (elt))
			 && ((tem = next_nonnote_insn (insn)) == 0
			     || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
			     || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
		  ;
#endif
		else if (GET_CODE (SET_DEST (elt)) != REG
			 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
			 || counts[REGNO (SET_DEST (elt))] != 0
			 || side_effects_p (SET_SRC (elt)))
		  live_insn = 1;
	      }
	    else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	      live_insn = 1;
	  }
      else
	/* Any other pattern (jump, asm, trap, ...) is assumed live.  */
	live_insn = 1;

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn (insn);
	}

      /* A REG_LIBCALL note marks the first insn of a libcall block, which
	 ends the block in our backward scan.  NOTE(review): INSN may have
	 just been deleted above; this assumes its REG_NOTES remain readable
	 afterwards -- confirm against delete_insn's semantics.  */
      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }
}
9293